1use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::{iter_maybe_parallel, slice_maybe_parallel};
15use num_complex::Complex;
16#[cfg(feature = "parallel")]
17use rayon::iter::ParallelIterator;
18use rustfft::FftPlanner;
19use std::f64::consts::PI;
20
/// Result of a period-estimation routine.
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period, in the units of `argvals` (NaN on failure).
    pub period: f64,
    /// Corresponding frequency (1 / period; 0 when the period is degenerate).
    pub frequency: f64,
    /// Power (or autocorrelation value) backing the estimate.
    pub power: f64,
    /// Heuristic confidence score; higher means more reliable.
    pub confidence: f64,
}
33
/// A single detected peak within one curve.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Location of the peak on the evaluation grid.
    pub time: f64,
    /// Curve value at the peak.
    pub value: f64,
    /// Topographic prominence, normalized by the overall data range.
    pub prominence: f64,
}
44
/// Output of [`detect_peaks`]: per-curve peaks plus summary statistics.
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Peaks found in each curve (outer index = curve).
    pub peaks: Vec<Vec<Peak>>,
    /// Distances between consecutive peaks within each curve.
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean of all inter-peak distances across curves (NaN when none exist).
    pub mean_period: f64,
}
55
/// One periodic component extracted by [`detect_multiple_periods`].
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Estimated period of this component.
    pub period: f64,
    /// Confidence score from the underlying FFT estimate.
    pub confidence: f64,
    /// Variance-based seasonal strength of this component.
    pub strength: f64,
    /// Amplitude of the fitted sinusoid.
    pub amplitude: f64,
    /// Phase of the fitted sinusoid (radians).
    pub phase: f64,
    /// 1-based extraction iteration (1 = dominant component).
    pub iteration: usize,
}
72
/// Method used to score seasonal strength.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Ratio of Fourier-fitted seasonal variance to total variance.
    Variance,
    /// Share of periodogram power at the period's harmonics.
    Spectral,
}
81
/// A detected transition in seasonal behavior.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time at which the transition occurs.
    pub time: f64,
    /// Whether seasonality starts or stops here.
    pub change_type: ChangeType,
    /// Seasonal strength just before the transition.
    pub strength_before: f64,
    /// Seasonal strength just after the transition.
    pub strength_after: f64,
}
94
/// Direction of a seasonality transition.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Seasonal strength rises above the detection threshold.
    Onset,
    /// Seasonal strength falls below the detection threshold.
    Cessation,
}
103
/// Output of [`detect_seasonality_changes`].
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Detected onsets and cessations, in time order.
    pub change_points: Vec<ChangePoint>,
    /// Windowed seasonal-strength curve the detection was run on.
    pub strength_curve: Vec<f64>,
}
112
/// Pointwise (instantaneous) period, frequency, and amplitude estimates.
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each evaluation point.
    pub period: Vec<f64>,
    /// Instantaneous frequency at each evaluation point.
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (envelope) at each evaluation point.
    pub amplitude: Vec<f64>,
}
123
/// Timing statistics of peaks across seasonal cycles.
///
/// NOTE(review): the producer of this struct is outside this view; field
/// semantics below are inferred from names — confirm against the builder.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Absolute time of each detected peak.
    pub peak_times: Vec<f64>,
    /// Value at each detected peak.
    pub peak_values: Vec<f64>,
    /// Peak timing normalized within its cycle (presumably in [0, 1]).
    pub normalized_timing: Vec<f64>,
    /// Mean of the normalized timings.
    pub mean_timing: f64,
    /// Standard deviation of the normalized timings.
    pub std_timing: f64,
    /// Range (max - min) of the normalized timings.
    pub range_timing: f64,
    /// Aggregate timing-variability score.
    pub variability_score: f64,
    /// Linear trend of peak timing across cycles.
    pub timing_trend: f64,
    /// Cycle index each peak belongs to.
    pub cycle_indices: Vec<usize>,
}
146
/// Qualitative classification of a series' seasonal behavior.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Seasonal with consistent peak timing across cycles.
    StableSeasonal,
    /// Seasonal, but peak timing varies between cycles.
    VariableTiming,
    /// Seasonality present only in some cycles.
    IntermittentSeasonal,
    /// No meaningful seasonality detected.
    NonSeasonal,
}
159
/// Full seasonality classification of a dataset.
///
/// NOTE(review): the producer of this struct is outside this view; field
/// semantics are inferred from names — confirm against the builder.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Whether the series is considered seasonal at all.
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles.
    pub has_stable_timing: bool,
    /// Variability of peak timing across cycles.
    pub timing_variability: f64,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Seasonal strength per cycle.
    pub cycle_strengths: Vec<f64>,
    /// Indices of cycles with weak seasonality.
    pub weak_seasons: Vec<usize>,
    /// Final qualitative label.
    pub classification: SeasonalType,
    /// Peak-timing analysis, when it could be computed.
    pub peak_timing: Option<PeakTimingResult>,
}
180
/// How to choose a binarization threshold for a strength curve.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Use the given value directly.
    Fixed(f64),
    /// Use the given percentile of the observed values.
    Percentile(f64),
    /// Pick the threshold automatically via Otsu's method.
    Otsu,
}
191
/// Qualitative classification of amplitude modulation over time.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Seasonal with approximately constant amplitude.
    Stable,
    /// Amplitude grows over time (strong positive trend).
    Emerging,
    /// Amplitude shrinks over time (strong negative trend).
    Fading,
    /// Amplitude varies without a clear monotone trend.
    Oscillating,
    /// No significant seasonality detected.
    NonSeasonal,
}
206
/// Output of [`detect_amplitude_modulation`] (Hilbert-envelope based).
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether overall seasonal strength exceeded the seasonality threshold.
    pub is_seasonal: bool,
    /// Overall spectral seasonal strength.
    pub seasonal_strength: f64,
    /// Whether the envelope's coefficient of variation exceeded the
    /// modulation threshold.
    pub has_modulation: bool,
    /// Qualitative modulation label.
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the smoothed envelope (std / mean).
    pub modulation_score: f64,
    /// Normalized linear trend of the envelope, clamped to [-1, 1].
    pub amplitude_trend: f64,
    /// Hilbert amplitude envelope of the detrended mean curve.
    pub strength_curve: Vec<f64>,
    /// Evaluation points corresponding to `strength_curve`.
    pub time_points: Vec<f64>,
    /// Minimum smoothed envelope value over the interior region.
    pub min_strength: f64,
    /// Maximum smoothed envelope value over the interior region.
    pub max_strength: f64,
}
231
/// Output of the wavelet-based amplitude-modulation detector.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether overall seasonal strength exceeded the seasonality threshold.
    pub is_seasonal: bool,
    /// Overall spectral seasonal strength.
    pub seasonal_strength: f64,
    /// Whether significant amplitude modulation was detected.
    pub has_modulation: bool,
    /// Qualitative modulation label.
    pub modulation_type: ModulationType,
    /// Modulation score (variation of the wavelet amplitude).
    pub modulation_score: f64,
    /// Normalized linear trend of the wavelet amplitude.
    pub amplitude_trend: f64,
    /// Wavelet amplitude at the analysis scale, per evaluation point.
    pub wavelet_amplitude: Vec<f64>,
    /// Evaluation points corresponding to `wavelet_amplitude`.
    pub time_points: Vec<f64>,
    /// Morlet scale used for the analysis.
    pub scale: f64,
}
254
255#[inline]
270fn compute_mean_curve_impl(data: &[f64], n: usize, m: usize, parallel: bool) -> Vec<f64> {
271 if parallel && m >= 100 {
272 iter_maybe_parallel!(0..m)
274 .map(|j| {
275 let mut sum = 0.0;
276 for i in 0..n {
277 sum += data[i + j * n];
278 }
279 sum / n as f64
280 })
281 .collect()
282 } else {
283 (0..m)
285 .map(|j| {
286 let mut sum = 0.0;
287 for i in 0..n {
288 sum += data[i + j * n];
289 }
290 sum / n as f64
291 })
292 .collect()
293 }
294}
295
/// Cross-sectional mean curve with parallelism enabled (the impl still
/// falls back to serial execution for small `m`).
#[inline]
fn compute_mean_curve(data: &[f64], n: usize, m: usize) -> Vec<f64> {
    compute_mean_curve_impl(data, n, m, true)
}
301
/// Half-open index range `[start, end)` covering the "interior" of a
/// length-`m` grid, trimming roughly 10% at each edge to avoid boundary
/// artifacts. The trim is capped so at least the central half survives.
/// Returns `None` when the range would be empty.
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let trim = (m as f64 * 0.1) as usize;
    // Never trim past a quarter of the domain on the left,
    // nor below three quarters on the right.
    let start = trim.min(m / 4);
    let end = m.saturating_sub(trim).max(m * 3 / 4);

    (end > start).then_some((start, end))
}
323
324fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
327 let n = data.len();
328 if n < 2 || argvals.len() != n {
329 return (Vec::new(), Vec::new());
330 }
331
332 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
333 let fs = 1.0 / dt; let mut planner = FftPlanner::<f64>::new();
336 let fft = planner.plan_fft_forward(n);
337
338 let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
339 fft.process(&mut buffer);
340
341 let n_freq = n / 2 + 1;
343 let mut power = Vec::with_capacity(n_freq);
344 let mut frequencies = Vec::with_capacity(n_freq);
345
346 for k in 0..n_freq {
347 let freq = k as f64 * fs / n as f64;
348 frequencies.push(freq);
349
350 let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
351 let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
353 power.push(p);
354 }
355
356 (frequencies, power)
357}
358
/// Sample autocorrelation of `data` at lags `0..=max_lag` (biased
/// estimator, normalized by `n * variance` so `acf[0] == 1`).
///
/// Returns an empty vector for empty input. The lag range is capped at
/// `n - 1` in every branch; for (near-)constant input each coefficient
/// is 1 by convention.
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    // Cap lags before branching so both paths return exactly
    // `max_lag.min(n - 1) + 1` values (the old degenerate branch capped
    // at `n`, yielding one extra element when max_lag >= n).
    let max_lag = max_lag.min(n - 1);

    if var < 1e-15 {
        return vec![1.0; max_lag + 1];
    }

    let mut acf = Vec::with_capacity(max_lag + 1);
    for lag in 0..=max_lag {
        let mut sum = 0.0;
        for i in 0..(n - lag) {
            sum += (data[i] - mean) * (data[i + lag] - mean);
        }
        acf.push(sum / (n as f64 * var));
    }

    acf
}
386
/// Indices of strict local maxima in `signal`, enforcing a minimum index
/// spacing between successive accepted peaks.
///
/// When a new maximum falls closer than `min_distance` to the most
/// recently accepted peak, the taller of the two is kept.
fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
    let len = signal.len();
    if len < 3 {
        return Vec::new();
    }

    let mut accepted: Vec<usize> = Vec::new();

    for idx in 1..len - 1 {
        let is_local_max = signal[idx] > signal[idx - 1] && signal[idx] > signal[idx + 1];
        if !is_local_max {
            continue;
        }

        match accepted.last().copied() {
            None => accepted.push(idx),
            Some(prev) if idx - prev >= min_distance => accepted.push(idx),
            Some(prev) if signal[idx] > signal[prev] => {
                // Too close to the previous peak but taller: replace it.
                accepted.pop();
                accepted.push(idx);
            }
            _ => {}
        }
    }

    accepted
}
411
/// Topographic prominence of the peak at `peak_idx`: the drop from the
/// peak down to the higher of the two valley floors found by walking
/// outward until a sample at or above the peak blocks the descent.
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Lowest value to the left, scanning until something >= peak.
    let left_min = signal[..peak_idx]
        .iter()
        .rev()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |acc, &v| acc.min(v));

    // Same scan to the right.
    let right_min = signal[peak_idx + 1..]
        .iter()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |acc, &v| acc.min(v));

    peak_val - left_min.max(right_min)
}
436
437pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
445 let n = signal.len();
446 if n == 0 {
447 return Vec::new();
448 }
449
450 let mut planner = FftPlanner::<f64>::new();
451 let fft_forward = planner.plan_fft_forward(n);
452 let fft_inverse = planner.plan_fft_inverse(n);
453
454 let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
456 fft_forward.process(&mut buffer);
457
458 let half = n / 2;
461 for k in 1..half {
462 buffer[k] *= 2.0;
463 }
464 for k in (half + 1)..n {
465 buffer[k] = Complex::new(0.0, 0.0);
466 }
467
468 fft_inverse.process(&mut buffer);
470
471 for c in buffer.iter_mut() {
473 *c /= n as f64;
474 }
475
476 buffer
477}
478
/// Unwrap a wrapped phase sequence: whenever a step between consecutive
/// samples exceeds π in magnitude, a ±2π offset is accumulated so the
/// output varies continuously.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    if phase.is_empty() {
        return Vec::new();
    }

    let mut result = Vec::with_capacity(phase.len());
    result.push(phase[0]);
    let mut offset = 0.0;

    for pair in phase.windows(2) {
        let step = pair[1] - pair[0];
        if step > PI {
            offset -= 2.0 * PI;
        } else if step < -PI {
            offset += 2.0 * PI;
        }
        result.push(pair[1] + offset);
    }

    result
}
503
504fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
511 let gaussian = (-t * t / 2.0).exp();
512 let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
513 oscillation * gaussian
514}
515
/// Continuous wavelet transform of `signal` at a single `scale` via
/// direct time-domain convolution with a Morlet wavelet (O(n²)).
///
/// Kept as a readable reference implementation; `cwt_morlet_fft` below is
/// the O(n log n) version used in practice. Returns an empty vector on
/// empty input or non-positive scale.
#[allow(dead_code)]
fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    // Average sampling step; assumes argvals is (approximately) uniform.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;

    // 1/sqrt(scale) normalization keeps energy comparable across scales.
    let norm = 1.0 / scale.sqrt();

    (0..n)
        .map(|b| {
            let mut sum = Complex::new(0.0, 0.0);
            for k in 0..n {
                let t_normalized = (argvals[k] - argvals[b]) / scale;
                // The Gaussian envelope is negligible beyond ~6 standard
                // deviations; truncate the kernel there.
                if t_normalized.abs() < 6.0 {
                    let wavelet = morlet_wavelet(t_normalized, omega0);
                    sum += signal[k] * wavelet.conj();
                }
            }
            sum * norm * dt
        })
        .collect()
}
557
/// Continuous wavelet transform at a single `scale`, computed as a
/// circular convolution in the frequency domain (O(n log n)).
///
/// Equivalent to `cwt_morlet` up to boundary effects: the FFT version
/// wraps around at the edges instead of truncating the kernel. Returns an
/// empty vector on empty input or non-positive scale.
fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let norm = 1.0 / scale.sqrt();

    // Sample the wavelet in "fftshift" order: indices <= n/2 hold the
    // non-negative time axis, the rest wrap to negative times, so the
    // kernel is centered at index 0 as circular convolution requires.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();

    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);

    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);

    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);

    // Correlation (not convolution): multiply by the conjugate spectrum.
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();

    fft_inverse.process(&mut result);

    // dt approximates the integral; 1/n undoes rustfft's unnormalized
    // inverse transform.
    for c in result.iter_mut() {
        *c *= dt / n as f64;
    }

    result
}
612
/// Otsu's method: choose the threshold that maximizes between-class
/// variance over a 256-bin histogram of the finite values.
///
/// Returns 0.5 when there are no finite values, and the midpoint when all
/// values are (nearly) identical.
fn otsu_threshold(values: &[f64]) -> f64 {
    if values.is_empty() {
        return 0.5;
    }

    // NaN/inf would corrupt the histogram bounds; drop them first.
    let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
    if valid.is_empty() {
        return 0.5;
    }

    let min_val = valid.iter().cloned().fold(f64::INFINITY, f64::min);
    let max_val = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);

    if (max_val - min_val).abs() < 1e-10 {
        return (min_val + max_val) / 2.0;
    }

    // Histogram the values into 256 equal-width bins.
    let n_bins = 256;
    let bin_width = (max_val - min_val) / n_bins as f64;
    let mut histogram = vec![0usize; n_bins];

    for &v in &valid {
        // Clamp so v == max_val lands in the last bin.
        let bin = ((v - min_val) / bin_width).min(n_bins as f64 - 1.0) as usize;
        histogram[bin] += 1;
    }

    let total = valid.len() as f64;
    // Total first-moment (in bin units) of the whole histogram.
    let mut sum_total = 0.0;
    for (i, &count) in histogram.iter().enumerate() {
        sum_total += i as f64 * count as f64;
    }

    let mut best_threshold = min_val;
    let mut best_variance = 0.0;

    // Running weight and first moment of the "background" class
    // (bins <= t); the "foreground" class is derived from the totals.
    let mut sum_b = 0.0;
    let mut weight_b = 0.0;

    for t in 0..n_bins {
        weight_b += histogram[t] as f64;
        if weight_b == 0.0 {
            continue;
        }

        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            break;
        }

        sum_b += t as f64 * histogram[t] as f64;

        let mean_b = sum_b / weight_b;
        let mean_f = (sum_total - sum_b) / weight_f;

        // Between-class variance (up to a constant factor of 1/total²,
        // which does not affect the argmax).
        let variance = weight_b * weight_f * (mean_b - mean_f).powi(2);

        if variance > best_variance {
            best_variance = variance;
            // Report the center of the winning bin in value units.
            best_threshold = min_val + (t as f64 + 0.5) * bin_width;
        }
    }

    best_threshold
}
684
/// Ordinary-least-squares slope of `y` regressed on `x`.
///
/// Returns 0.0 for mismatched lengths, fewer than two points, or
/// degenerate (constant) `x`.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let count = x.len() as f64;
    let x_bar = x.iter().sum::<f64>() / count;
    let y_bar = y.iter().sum::<f64>() / count;

    // Accumulate the centered cross- and self-products in one pass.
    let (sxy, sxx) = x.iter().zip(y.iter()).fold((0.0, 0.0), |(sxy, sxx), (&xi, &yi)| {
        (sxy + (xi - x_bar) * (yi - y_bar), sxx + (xi - x_bar).powi(2))
    });

    if sxx.abs() < 1e-15 {
        0.0
    } else {
        sxy / sxx
    }
}
709
/// Estimate the dominant period of the data's mean curve from its
/// periodogram.
///
/// `data` is column-major, `n` curves × `m` evaluation points; `argvals`
/// holds the (approximately uniform) evaluation grid. Returns NaN fields
/// with zero confidence on invalid input or an empty spectrum.
pub fn estimate_period_fft(data: &[f64], n: usize, m: usize, argvals: &[f64]) -> PeriodEstimate {
    if n == 0 || m < 4 || argvals.len() != m {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let (frequencies, power) = periodogram(&mean_curve, argvals);

    if frequencies.len() < 2 {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    // Strongest non-DC bin (skip k = 0, which only holds the mean).
    let mut max_power = 0.0;
    let mut max_idx = 1;
    for (i, &p) in power.iter().enumerate().skip(1) {
        if p > max_power {
            max_power = p;
            max_idx = i;
        }
    }

    let dominant_freq = frequencies[max_idx];
    let period = if dominant_freq > 1e-15 {
        1.0 / dominant_freq
    } else {
        f64::INFINITY
    };

    // Confidence = peak power relative to the average non-DC power: a
    // peak well above the spectral background scores high.
    let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
    let confidence = if mean_power > 1e-15 {
        max_power / mean_power
    } else {
        0.0
    };

    PeriodEstimate {
        period,
        frequency: dominant_freq,
        power: max_power,
        confidence,
    }
}
783
/// Estimate the dominant period of the mean curve from the first peak of
/// its autocorrelation function.
///
/// `max_lag` caps the lag search range (in grid steps). The `power` and
/// `confidence` fields carry the ACF value (and its magnitude) at the
/// chosen lag. Returns NaN fields when the input is invalid or no ACF
/// peak exists.
pub fn estimate_period_acf(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    max_lag: usize,
) -> PeriodEstimate {
    if n == 0 || m < 4 || argvals.len() != m {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let acf = autocorrelation(&mean_curve, max_lag);

    // Skip trivially high small lags; search for peaks from lag 2 on.
    let min_lag = 2;
    let peaks = find_peaks_1d(&acf[min_lag..], 1);

    if peaks.is_empty() {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    // First ACF peak corresponds to the fundamental period; re-offset the
    // index into full-ACF lag units.
    let peak_lag = peaks[0] + min_lag;
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let period = peak_lag as f64 * dt;
    let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };

    PeriodEstimate {
        period,
        frequency,
        power: acf[peak_lag],
        confidence: acf[peak_lag].abs(),
    }
}
840
/// Estimate the period by a grid search over candidate periods, scoring
/// each by the residual sum of squares of a Fourier fit to the mean curve.
///
/// For each of `n_candidates` periods in `[period_min, period_max]`, a
/// Fourier basis with `n_harmonics` harmonics is fit by projecting the
/// mean curve onto each basis function independently (a marginal
/// projection — exact only when the basis is orthogonal over `argvals`).
/// The candidate with the lowest RSS wins; confidence is the mean-to-best
/// RSS ratio, capped at 10. Returns NaN fields on invalid input.
pub fn estimate_period_regression(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period_min: f64,
    period_max: f64,
    n_candidates: usize,
    n_harmonics: usize,
) -> PeriodEstimate {
    if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // Constant term plus sine/cosine pair per harmonic.
    let nbasis = 1 + 2 * n_harmonics;

    // Evenly spaced candidate periods across the search range.
    let candidates: Vec<f64> = (0..n_candidates)
        .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
        .collect();

    // Candidates are independent, so score them (possibly) in parallel.
    let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
        .map(|&period| {
            let basis = fourier_basis_with_period(argvals, nbasis, period);

            let mut rss = 0.0;
            for j in 0..m {
                let mut fitted = 0.0;
                for k in 0..nbasis {
                    let b_val = basis[j + k * m];
                    // Per-basis projection coefficient:
                    // <curve, b_k> / <b_k, b_k> (guarded against a
                    // numerically zero norm).
                    let coef: f64 = (0..m)
                        .map(|l| mean_curve[l] * basis[l + k * m])
                        .sum::<f64>()
                        / (0..m)
                            .map(|l| basis[l + k * m].powi(2))
                            .sum::<f64>()
                            .max(1e-15);
                    fitted += coef * b_val;
                }
                let resid = mean_curve[j] - fitted;
                rss += resid * resid;
            }

            (period, rss)
        })
        .collect();

    let (best_period, min_rss) = results
        .iter()
        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
        .cloned()
        .unwrap_or((f64::NAN, f64::INFINITY));

    // Confidence: how much better the best fit is than the average
    // candidate, capped at 10 (and maximal for a near-perfect fit).
    let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
    let confidence = if min_rss > 1e-15 {
        (mean_rss / min_rss).min(10.0)
    } else {
        10.0
    };

    PeriodEstimate {
        period: best_period,
        frequency: if best_period > 1e-15 {
            1.0 / best_period
        } else {
            0.0
        },
        // NOTE(review): if mean_rss is 0 this is NaN — only reachable when
        // every candidate fits perfectly; confirm acceptable upstream.
        power: 1.0 - min_rss / mean_rss,
        confidence,
    }
}
938
/// Iteratively extract up to `max_periods` periodic components from the
/// mean curve.
///
/// Each iteration estimates the dominant period of the current residual
/// via FFT, validates it against `min_confidence` and `min_strength`,
/// fits a single sinusoid at that period by least squares, records it,
/// and subtracts it before the next pass. Stops early when a candidate
/// fails validation. Returns the components in extraction order.
pub fn detect_multiple_periods(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    max_periods: usize,
    min_confidence: f64,
    min_strength: f64,
) -> Vec<DetectedPeriod> {
    if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
        return Vec::new();
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let mut residual = mean_curve.clone();
    let mut detected = Vec::with_capacity(max_periods);

    for iteration in 1..=max_periods {
        // The residual is treated as a single curve (n = 1) from here on.
        let residual_data: Vec<f64> = residual.clone();

        let est = estimate_period_fft(&residual_data, 1, m, argvals);

        if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
            break;
        }

        let strength = seasonal_strength_variance(&residual_data, 1, m, argvals, est.period, 3);

        if strength < min_strength || strength.is_nan() {
            break;
        }

        // Least-squares fit of a single sinusoid at the detected period:
        // residual ≈ a·cos(ωt) + b·sin(ωt).
        let omega = 2.0 * PI / est.period;
        let mut cos_sum = 0.0;
        let mut sin_sum = 0.0;

        for (j, &t) in argvals.iter().enumerate() {
            cos_sum += residual[j] * (omega * t).cos();
            sin_sum += residual[j] * (omega * t).sin();
        }

        let a = 2.0 * cos_sum / m as f64;
        let b = 2.0 * sin_sum / m as f64;
        let amplitude = (a * a + b * b).sqrt();
        let phase = b.atan2(a);

        detected.push(DetectedPeriod {
            period: est.period,
            confidence: est.confidence,
            strength,
            amplitude,
            phase,
            iteration,
        });

        // Remove this component so the next pass sees only what is left.
        for (j, &t) in argvals.iter().enumerate() {
            let fitted = a * (omega * t).cos() + b * (omega * t).sin();
            residual[j] -= fitted;
        }
    }

    detected
}
1029
/// Detect peaks in each curve via sign changes of the first derivative.
///
/// Optionally smooths the data first with a Fourier fit (basis size given
/// or chosen by GCV), enforces a minimum time separation `min_distance`
/// and a minimum normalized prominence `min_prominence`, and reports
/// per-curve peaks, inter-peak distances, and their overall mean (a crude
/// period estimate). Returns an empty result on invalid input.
pub fn detect_peaks(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    min_distance: Option<f64>,
    min_prominence: Option<f64>,
    smooth_first: bool,
    smooth_nbasis: Option<usize>,
) -> PeakDetectionResult {
    if n == 0 || m < 3 || argvals.len() != m {
        return PeakDetectionResult {
            peaks: Vec::new(),
            inter_peak_distances: Vec::new(),
            mean_period: f64::NAN,
        };
    }

    // Convert the time-based separation into grid steps (default: 1).
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);

    // Optionally denoise with a Fourier fit; fall back to the raw data if
    // the fit fails.
    let work_data = if smooth_first {
        let nbasis = smooth_nbasis
            .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, n, m, argvals, 5, 25));

        if let Some(result) = crate::basis::fourier_fit_1d(data, n, m, argvals, nbasis) {
            result.fitted
        } else {
            data.to_vec()
        }
    } else {
        data.to_vec()
    };

    let deriv1 = deriv_1d(&work_data, n, m, argvals, 1);

    // Global value range, used to normalize prominences across curves.
    let data_range: f64 = {
        let mut min_val = f64::INFINITY;
        let mut max_val = f64::NEG_INFINITY;
        for &v in work_data.iter() {
            min_val = min_val.min(v);
            max_val = max_val.max(v);
        }
        (max_val - min_val).max(1e-15)
    };

    // Curves are independent; process them (possibly) in parallel.
    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
        .map(|i| {
            // Gather curve i and its derivative from the column-major layout.
            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();

            // A peak is a +→− (or +→0) sign change of the derivative.
            let mut peak_indices = Vec::new();
            for j in 1..m {
                if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
                    let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
                        j - 1
                    } else {
                        j
                    };

                    if peak_indices.is_empty()
                        || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
                    {
                        peak_indices.push(idx);
                    }
                }
            }

            let mut peaks: Vec<Peak> = peak_indices
                .iter()
                .map(|&idx| {
                    // Normalize by the global range so prominences are
                    // comparable across curves.
                    let prominence = compute_prominence(&curve, idx) / data_range;
                    Peak {
                        time: argvals[idx],
                        value: curve[idx],
                        prominence,
                    }
                })
                .collect();

            if let Some(min_prom) = min_prominence {
                peaks.retain(|p| p.prominence >= min_prom);
            }

            let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();

            (peaks, distances)
        })
        .collect();

    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();

    // Pool all inter-peak distances into one crude period estimate.
    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
    let mean_period = if all_distances.is_empty() {
        f64::NAN
    } else {
        all_distances.iter().sum::<f64>() / all_distances.len() as f64
    };

    PeakDetectionResult {
        peaks,
        inter_peak_distances,
        mean_period,
    }
}
1169
/// Variance-based seasonal strength: the fraction of the mean curve's
/// variance explained by a Fourier fit at the given `period`.
///
/// Projects the mean curve onto each non-constant basis function of a
/// Fourier basis with `n_harmonics` harmonics and returns
/// `var(seasonal) / var(total)`, capped at 1. Returns NaN on invalid
/// input and 0 for a (near-)constant curve.
pub fn seasonal_strength_variance(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    n_harmonics: usize,
) -> f64 {
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return f64::NAN;
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let total_var: f64 = mean_curve
        .iter()
        .map(|&x| (x - global_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    if total_var < 1e-15 {
        return 0.0;
    }

    let nbasis = 1 + 2 * n_harmonics;
    let basis = fourier_basis_with_period(argvals, nbasis, period);

    // Build the seasonal component by marginal projection onto each
    // non-constant basis function (k = 0 is the constant term and is
    // deliberately skipped — the mean is not seasonal variation).
    let mut seasonal = vec![0.0; m];
    for k in 1..nbasis {
        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
        if b_sum > 1e-15 {
            let coef: f64 = (0..m)
                .map(|j| mean_curve[j] * basis[j + k * m])
                .sum::<f64>()
                / b_sum;
            for j in 0..m {
                seasonal[j] += coef * basis[j + k * m];
            }
        }
    }

    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
    let seasonal_var: f64 = seasonal
        .iter()
        .map(|&x| (x - seasonal_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    (seasonal_var / total_var).min(1.0)
}
1246
1247pub fn seasonal_strength_spectral(
1258 data: &[f64],
1259 n: usize,
1260 m: usize,
1261 argvals: &[f64],
1262 period: f64,
1263) -> f64 {
1264 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1265 return f64::NAN;
1266 }
1267
1268 let mean_curve = compute_mean_curve(data, n, m);
1270
1271 let (frequencies, power) = periodogram(&mean_curve, argvals);
1272
1273 if frequencies.len() < 2 {
1274 return f64::NAN;
1275 }
1276
1277 let fundamental_freq = 1.0 / period;
1278
1279 let _freq_tol = fundamental_freq * 0.1; let mut seasonal_power = 0.0;
1282 let mut total_power = 0.0;
1283
1284 for (i, (&freq, &p)) in frequencies.iter().zip(power.iter()).enumerate() {
1285 if i == 0 {
1286 continue;
1287 } total_power += p;
1290
1291 let ratio = freq / fundamental_freq;
1293 let nearest_harmonic = ratio.round();
1294 if (ratio - nearest_harmonic).abs() < 0.1 && nearest_harmonic >= 1.0 {
1295 seasonal_power += p;
1296 }
1297 }
1298
1299 if total_power < 1e-15 {
1300 return 0.0;
1301 }
1302
1303 (seasonal_power / total_power).min(1.0)
1304}
1305
/// Wavelet-based seasonal strength: the ratio of Morlet wavelet power at
/// the scale matching `period` to the total variance of the detrended
/// mean curve.
///
/// Only the interior of the domain (per [`interior_bounds`]) is averaged,
/// to avoid the cone of influence at the edges. Returns NaN on invalid
/// input or when no interior exists, 0 for a constant curve, and a value
/// capped at 1 otherwise.
pub fn seasonal_strength_wavelet(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
) -> f64 {
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return f64::NAN;
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // Remove the mean so DC does not leak into the wavelet power.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;

    if total_variance < 1e-15 {
        return 0.0;
    }

    // Standard Morlet center frequency; scale chosen so the wavelet's
    // dominant frequency matches 1/period.
    let omega0 = 6.0;
    let scale = period * omega0 / (2.0 * PI);
    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);

    if wavelet_coeffs.is_empty() {
        return f64::NAN;
    }

    let (interior_start, interior_end) = match interior_bounds(m) {
        Some(bounds) => bounds,
        None => return f64::NAN,
    };

    let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
        .iter()
        .map(|c| c.norm_sqr())
        .sum::<f64>()
        / (interior_end - interior_start) as f64;

    (wavelet_power / total_variance).sqrt().min(1.0)
}
1376
1377pub fn seasonal_strength_windowed(
1391 data: &[f64],
1392 n: usize,
1393 m: usize,
1394 argvals: &[f64],
1395 period: f64,
1396 window_size: f64,
1397 method: StrengthMethod,
1398) -> Vec<f64> {
1399 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
1400 return Vec::new();
1401 }
1402
1403 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1404 let half_window_points = ((window_size / 2.0) / dt).round() as usize;
1405
1406 let mean_curve = compute_mean_curve(data, n, m);
1408
1409 iter_maybe_parallel!(0..m)
1410 .map(|center| {
1411 let start = center.saturating_sub(half_window_points);
1412 let end = (center + half_window_points + 1).min(m);
1413 let window_m = end - start;
1414
1415 if window_m < 4 {
1416 return f64::NAN;
1417 }
1418
1419 let window_data: Vec<f64> = mean_curve[start..end].to_vec();
1420 let window_argvals: Vec<f64> = argvals[start..end].to_vec();
1421
1422 let single_data = window_data.clone();
1424
1425 match method {
1426 StrengthMethod::Variance => seasonal_strength_variance(
1427 &single_data,
1428 1,
1429 window_m,
1430 &window_argvals,
1431 period,
1432 3,
1433 ),
1434 StrengthMethod::Spectral => {
1435 seasonal_strength_spectral(&single_data, 1, window_m, &window_argvals, period)
1436 }
1437 }
1438 })
1439 .collect()
1440}
1441
/// Detect onsets and cessations of seasonality by thresholding the
/// windowed seasonal-strength curve.
///
/// A change point is recorded whenever the strength curve crosses
/// `threshold`, except that crossings closer than `min_duration` (in time
/// units) to the previous accepted change are suppressed as chatter. NaN
/// strength values are skipped. Returns the change points together with
/// the strength curve they were derived from.
pub fn detect_seasonality_changes(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    threshold: f64,
    window_size: f64,
    min_duration: f64,
) -> ChangeDetectionResult {
    if n == 0 || m < 4 || argvals.len() != m {
        return ChangeDetectionResult {
            change_points: Vec::new(),
            strength_curve: Vec::new(),
        };
    }

    let strength_curve = seasonal_strength_windowed(
        data,
        n,
        m,
        argvals,
        period,
        window_size,
        StrengthMethod::Variance,
    );

    if strength_curve.is_empty() {
        return ChangeDetectionResult {
            change_points: Vec::new(),
            strength_curve: Vec::new(),
        };
    }

    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let min_dur_points = (min_duration / dt).round() as usize;

    let mut change_points = Vec::new();
    // Initial state from the first window. NOTE(review): if
    // strength_curve[0] is NaN this starts as "not seasonal" — confirm
    // that is the intended default.
    let mut in_seasonal = strength_curve[0] > threshold;
    let mut last_change_idx: Option<usize> = None;

    for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
        if ss.is_nan() {
            continue;
        }

        let now_seasonal = ss > threshold;

        if now_seasonal != in_seasonal {
            // Debounce: ignore crossings too soon after the last one.
            if let Some(last_idx) = last_change_idx {
                if i - last_idx < min_dur_points {
                    continue;
                }
            }

            let change_type = if now_seasonal {
                ChangeType::Onset
            } else {
                ChangeType::Cessation
            };

            let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
                strength_curve[i - 1]
            } else {
                ss
            };

            change_points.push(ChangePoint {
                time: argvals[i],
                change_type,
                strength_before,
                strength_after: ss,
            });

            in_seasonal = now_seasonal;
            last_change_idx = Some(i);
        }
    }

    ChangeDetectionResult {
        change_points,
        strength_curve,
    }
}
1547
/// Detect amplitude modulation of the seasonal component via the Hilbert
/// envelope of the detrended mean curve.
///
/// Steps: (1) gate on overall spectral seasonal strength vs
/// `seasonality_threshold`; (2) compute the analytic-signal envelope;
/// (3) smooth it over roughly one period; (4) on the interior of the
/// domain, score modulation as the envelope's coefficient of variation
/// and its normalized linear trend; (5) classify as Stable / Emerging /
/// Fading / Oscillating. Early returns carry sensible defaults for each
/// failure mode.
pub fn detect_amplitude_modulation(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> AmplitudeModulationResult {
    let empty_result = AmplitudeModulationResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        strength_curve: Vec::new(),
        time_points: Vec::new(),
        min_strength: 0.0,
        max_strength: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Gate: no point analyzing modulation if there is no seasonality.
    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);

    if overall_strength < seasonality_threshold {
        return AmplitudeModulationResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // Remove the mean so the envelope tracks oscillation amplitude only.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
    let analytic = hilbert_transform(&detrended);
    let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();

    if envelope.is_empty() {
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Moving-average smooth over roughly one period, so within-cycle
    // envelope ripple does not register as modulation.
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let smooth_window = ((period / dt) as usize).max(3);
    let half_window = smooth_window / 2;

    let smoothed_envelope: Vec<f64> = (0..m)
        .map(|i| {
            let start = i.saturating_sub(half_window);
            let end = (i + half_window + 1).min(m);
            let sum: f64 = envelope[start..end].iter().sum();
            sum / (end - start) as f64
        })
        .collect();

    // Analyze only the interior to avoid Hilbert edge artifacts; require
    // at least a handful of interior points.
    let (interior_start, interior_end) = match interior_bounds(m) {
        Some((s, e)) if e > s + 4 => (s, e),
        _ => {
            return AmplitudeModulationResult {
                is_seasonal: true,
                seasonal_strength: overall_strength,
                has_modulation: false,
                modulation_type: ModulationType::Stable,
                modulation_score: 0.0,
                amplitude_trend: 0.0,
                strength_curve: envelope,
                time_points: argvals.to_vec(),
                min_strength: 0.0,
                max_strength: 0.0,
            };
        }
    };

    let interior_envelope = &smoothed_envelope[interior_start..interior_end];
    let interior_times = &argvals[interior_start..interior_end];
    let n_interior = interior_envelope.len() as f64;

    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    // Modulation score = coefficient of variation of the envelope.
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // OLS slope of envelope vs time, to detect a monotone trend.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Normalize the total drift over the window by the mean amplitude and
    // clamp, so the trend is a dimensionless value in [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amp_range = max_amp - min_amp;
    let amplitude_trend = if amp_range > 1e-10 && time_span > 1e-10 && mean_amp > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    // Classification: strong positive/negative trend wins; otherwise any
    // above-threshold variation counts as oscillating.
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeModulationResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation,
        modulation_type,
        modulation_score,
        amplitude_trend,
        strength_curve: envelope,
        time_points: argvals.to_vec(),
        min_strength: min_amp,
        max_strength: max_amp,
    }
}
1762
/// Detect amplitude modulation of the seasonal component using a Morlet
/// continuous wavelet transform at the scale derived from `period`.
///
/// `data` holds `n` curves sampled at `m` points (laid out as
/// `data[i + j * n]`, curve `i` at time index `j`, as in
/// `classify_seasonality`); `argvals` are the `m` time stamps.
///
/// The series is first screened with `seasonal_strength_spectral`: below
/// `seasonality_threshold` it is reported as non-seasonal and no wavelet
/// analysis runs. Otherwise the wavelet amplitude of the DC-removed mean
/// curve is analyzed on the interior of the domain; a coefficient of
/// variation above `modulation_threshold` flags modulation, classified as
/// Emerging / Fading / Oscillating by the sign and size of the linear
/// amplitude trend.
pub fn detect_amplitude_modulation_wavelet(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> WaveletAmplitudeResult {
    // Template returned verbatim for degenerate inputs.
    let empty_result = WaveletAmplitudeResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        wavelet_amplitude: Vec::new(),
        time_points: Vec::new(),
        scale: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Cheap screen: skip the wavelet work entirely when the series is not
    // seasonal at the requested period.
    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);

    if overall_strength < seasonality_threshold {
        return WaveletAmplitudeResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale: 0.0,
        };
    }

    // Work on the cross-sectional mean curve with its DC offset removed.
    let mean_curve = compute_mean_curve(data, n, m);

    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    // omega0 = 6 is the common Morlet center frequency; the scale maps the
    // target period onto the wavelet via scale = period * omega0 / (2*pi)
    // (approximate scale/period relation — exact mapping depends on
    // `cwt_morlet_fft`'s convention; TODO confirm).
    let omega0 = 6.0;
    let scale = period * omega0 / (2.0 * PI);

    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);

    // No coefficients: report seasonal-but-stable with an empty envelope.
    if wavelet_coeffs.is_empty() {
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale,
        };
    }

    // Amplitude envelope = modulus of the complex wavelet coefficients.
    let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();

    // Restrict statistics to the interior of the domain (presumably to avoid
    // CWT edge effects — see `interior_bounds`); require at least 5 points.
    let (interior_start, interior_end) = match interior_bounds(m) {
        Some((s, e)) if e > s + 4 => (s, e),
        _ => {
            return WaveletAmplitudeResult {
                is_seasonal: true,
                seasonal_strength: overall_strength,
                has_modulation: false,
                modulation_type: ModulationType::Stable,
                modulation_score: 0.0,
                amplitude_trend: 0.0,
                wavelet_amplitude,
                time_points: argvals.to_vec(),
                scale,
            };
        }
    };

    let interior_amp = &wavelet_amplitude[interior_start..interior_end];
    let interior_times = &argvals[interior_start..interior_end];
    let n_interior = interior_amp.len() as f64;

    let mean_amp = interior_amp.iter().sum::<f64>() / n_interior;

    // Modulation score = coefficient of variation of the envelope.
    let variance = interior_amp
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Least-squares slope of envelope vs. time.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_amp.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Trend normalized to the mean amplitude over the interior time span,
    // clamped to [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    let has_modulation = modulation_score > modulation_threshold;
    // Strong positive trend -> Emerging, strong negative -> Fading,
    // otherwise the envelope oscillates.
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    WaveletAmplitudeResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation,
        modulation_type,
        modulation_score,
        amplitude_trend,
        wavelet_amplitude,
        time_points: argvals.to_vec(),
        scale,
    }
}
1937
1938pub fn instantaneous_period(
1953 data: &[f64],
1954 n: usize,
1955 m: usize,
1956 argvals: &[f64],
1957) -> InstantaneousPeriod {
1958 if n == 0 || m < 4 || argvals.len() != m {
1959 return InstantaneousPeriod {
1960 period: Vec::new(),
1961 frequency: Vec::new(),
1962 amplitude: Vec::new(),
1963 };
1964 }
1965
1966 let mean_curve = compute_mean_curve(data, n, m);
1968
1969 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1971 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1972
1973 let analytic = hilbert_transform(&detrended);
1975
1976 let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
1978
1979 let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();
1980
1981 let unwrapped_phase = unwrap_phase(&phase);
1983
1984 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1986 let mut inst_freq = vec![0.0; m];
1987
1988 if m > 1 {
1990 inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
1991 }
1992 for j in 1..(m - 1) {
1993 inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
1994 }
1995 if m > 1 {
1996 inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
1997 }
1998
1999 let period: Vec<f64> = inst_freq
2001 .iter()
2002 .map(|&f| {
2003 if f.abs() > 1e-10 {
2004 (1.0 / f).abs()
2005 } else {
2006 f64::INFINITY
2007 }
2008 })
2009 .collect();
2010
2011 InstantaneousPeriod {
2012 period,
2013 frequency: inst_freq,
2014 amplitude,
2015 }
2016}
2017
2018pub fn analyze_peak_timing(
2039 data: &[f64],
2040 n: usize,
2041 m: usize,
2042 argvals: &[f64],
2043 period: f64,
2044 smooth_nbasis: Option<usize>,
2045) -> PeakTimingResult {
2046 if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
2047 return PeakTimingResult {
2048 peak_times: Vec::new(),
2049 peak_values: Vec::new(),
2050 normalized_timing: Vec::new(),
2051 mean_timing: f64::NAN,
2052 std_timing: f64::NAN,
2053 range_timing: f64::NAN,
2054 variability_score: f64::NAN,
2055 timing_trend: f64::NAN,
2056 cycle_indices: Vec::new(),
2057 };
2058 }
2059
2060 let min_distance = period * 0.7;
2063 let peaks = detect_peaks(
2064 data,
2065 n,
2066 m,
2067 argvals,
2068 Some(min_distance),
2069 None, true, smooth_nbasis,
2072 );
2073
2074 let sample_peaks = if peaks.peaks.is_empty() {
2077 Vec::new()
2078 } else {
2079 peaks.peaks[0].clone()
2080 };
2081
2082 if sample_peaks.is_empty() {
2083 return PeakTimingResult {
2084 peak_times: Vec::new(),
2085 peak_values: Vec::new(),
2086 normalized_timing: Vec::new(),
2087 mean_timing: f64::NAN,
2088 std_timing: f64::NAN,
2089 range_timing: f64::NAN,
2090 variability_score: f64::NAN,
2091 timing_trend: f64::NAN,
2092 cycle_indices: Vec::new(),
2093 };
2094 }
2095
2096 let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
2097 let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();
2098
2099 let t_start = argvals[0];
2101 let normalized_timing: Vec<f64> = peak_times
2102 .iter()
2103 .map(|&t| {
2104 let cycle_pos = (t - t_start) % period;
2105 cycle_pos / period
2106 })
2107 .collect();
2108
2109 let cycle_indices: Vec<usize> = peak_times
2111 .iter()
2112 .map(|&t| ((t - t_start) / period).floor() as usize + 1)
2113 .collect();
2114
2115 let n_peaks = normalized_timing.len() as f64;
2117 let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;
2118
2119 let variance: f64 = normalized_timing
2120 .iter()
2121 .map(|&x| (x - mean_timing).powi(2))
2122 .sum::<f64>()
2123 / n_peaks;
2124 let std_timing = variance.sqrt();
2125
2126 let min_timing = normalized_timing
2127 .iter()
2128 .cloned()
2129 .fold(f64::INFINITY, f64::min);
2130 let max_timing = normalized_timing
2131 .iter()
2132 .cloned()
2133 .fold(f64::NEG_INFINITY, f64::max);
2134 let range_timing = max_timing - min_timing;
2135
2136 let variability_score = (std_timing / 0.1).min(1.0);
2140
2141 let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
2143 let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);
2144
2145 PeakTimingResult {
2146 peak_times,
2147 peak_values,
2148 normalized_timing,
2149 mean_timing,
2150 std_timing,
2151 range_timing,
2152 variability_score,
2153 timing_trend,
2154 cycle_indices,
2155 }
2156}
2157
/// Classify a functional dataset's seasonality as stable / variable-timing /
/// intermittent / non-seasonal.
///
/// `data` is laid out as `data[i + j * n]` (curve `i` at time index `j`).
/// Overall strength comes from `seasonal_strength_variance`; per-cycle
/// strengths flag individual weak seasons; `analyze_peak_timing` decides
/// whether peak timing is stable. Defaults: `strength_threshold` 0.3,
/// `timing_threshold` 0.05 (std of normalized peak position).
pub fn classify_seasonality(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    strength_threshold: Option<f64>,
    timing_threshold: Option<f64>,
) -> SeasonalityClassification {
    let strength_thresh = strength_threshold.unwrap_or(0.3);
    let timing_thresh = timing_threshold.unwrap_or(0.05);

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return SeasonalityClassification {
            is_seasonal: false,
            has_stable_timing: false,
            timing_variability: f64::NAN,
            seasonal_strength: f64::NAN,
            cycle_strengths: Vec::new(),
            weak_seasons: Vec::new(),
            classification: SeasonalType::NonSeasonal,
            peak_timing: None,
        };
    }

    // Overall strength over the full record (3 harmonics).
    let overall_strength = seasonal_strength_variance(data, n, m, argvals, period, 3);

    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let _points_per_cycle = (period / dt).round() as usize;
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    // Only complete cycles are scored.
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    // Score each cycle independently on its own time slice.
    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // Half-open-ish slice of grid points covering [cycle_start, cycle_end].
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        // Too few points to estimate strength: record NaN. Note NaN also
        // never compares below `strength_thresh`, so such cycles are not
        // counted as weak.
        if cycle_m < 4 {
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Re-pack the column-major slice for all n curves on this window.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[i + j * n]))
            .collect();
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        let strength =
            seasonal_strength_variance(&cycle_data, n, cycle_m, &cycle_argvals, period, 3);

        cycle_strengths.push(strength);

        if strength < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    let peak_timing = analyze_peak_timing(data, n, m, argvals, period, None);

    let is_seasonal = overall_strength >= strength_thresh;
    // std_timing is NaN when no peaks were found; NaN <= thresh is false,
    // so timing is then treated as unstable.
    let has_stable_timing = peak_timing.std_timing <= timing_thresh;
    let timing_variability = peak_timing.variability_score;

    // More than 30% weak cycles -> intermittent; otherwise timing decides
    // between variable and stable.
    let n_weak = weak_seasons.len();
    let classification = if !is_seasonal {
        SeasonalType::NonSeasonal
    } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
        SeasonalType::IntermittentSeasonal
    } else if !has_stable_timing {
        SeasonalType::VariableTiming
    } else {
        SeasonalType::StableSeasonal
    };

    SeasonalityClassification {
        is_seasonal,
        has_stable_timing,
        timing_variability,
        seasonal_strength: overall_strength,
        cycle_strengths,
        weak_seasons,
        classification,
        peak_timing: Some(peak_timing),
    }
}
2281
2282pub fn detect_seasonality_changes_auto(
2296 data: &[f64],
2297 n: usize,
2298 m: usize,
2299 argvals: &[f64],
2300 period: f64,
2301 threshold_method: ThresholdMethod,
2302 window_size: f64,
2303 min_duration: f64,
2304) -> ChangeDetectionResult {
2305 if n == 0 || m < 4 || argvals.len() != m {
2306 return ChangeDetectionResult {
2307 change_points: Vec::new(),
2308 strength_curve: Vec::new(),
2309 };
2310 }
2311
2312 let strength_curve = seasonal_strength_windowed(
2314 data,
2315 n,
2316 m,
2317 argvals,
2318 period,
2319 window_size,
2320 StrengthMethod::Variance,
2321 );
2322
2323 if strength_curve.is_empty() {
2324 return ChangeDetectionResult {
2325 change_points: Vec::new(),
2326 strength_curve: Vec::new(),
2327 };
2328 }
2329
2330 let threshold = match threshold_method {
2332 ThresholdMethod::Fixed(t) => t,
2333 ThresholdMethod::Percentile(p) => {
2334 let mut sorted: Vec<f64> = strength_curve
2335 .iter()
2336 .copied()
2337 .filter(|x| x.is_finite())
2338 .collect();
2339 sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2340 if sorted.is_empty() {
2341 0.5
2342 } else {
2343 let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2344 sorted[idx.min(sorted.len() - 1)]
2345 }
2346 }
2347 ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2348 };
2349
2350 detect_seasonality_changes(
2352 data,
2353 n,
2354 m,
2355 argvals,
2356 period,
2357 threshold,
2358 window_size,
2359 min_duration,
2360 )
2361}
2362
/// Result of the SAZED ensemble period-detection method.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Consensus period (NaN when fewer than two components were confident
    /// or no consensus was reached).
    pub period: f64,
    /// Fraction of the five components agreeing on the consensus
    /// (agreeing_components / 5).
    pub confidence: f64,
    /// Raw period estimate from each individual component.
    pub component_periods: SazedComponents,
    /// Number of confident components agreeing on the consensus period.
    pub agreeing_components: usize,
}
2375
/// Period estimates from the five SAZED ensemble components.
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Period from the dominant periodogram peak.
    pub spectral: f64,
    /// Period from the first autocorrelation peak.
    pub acf_peak: f64,
    /// Weighted-average period over all autocorrelation peaks.
    pub acf_average: f64,
    /// Period from the spacing of ACF zero crossings.
    pub zero_crossing: f64,
    /// Spectral period of the first-differenced signal.
    pub spectral_diff: f64,
}
2390
/// SAZED ensemble period detection on a single series.
///
/// Five independent estimators (spectral, ACF peak, ACF average,
/// zero-crossing, spectral-of-differences) each propose a period; estimates
/// that fall inside a plausible range and pass their per-component confidence
/// gate are fed into `find_consensus_period`. `tolerance` (default 0.05) is
/// the relative difference within which two periods count as agreeing.
/// Returns NaN period / zero confidence when fewer than two components are
/// confident.
pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
    let n = data.len();
    let tol = tolerance.unwrap_or(0.05);
    // Need at least 8 samples and a matching time grid.
    if n < 8 || argvals.len() != n {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: SazedComponents {
                spectral: f64::NAN,
                acf_peak: f64::NAN,
                acf_average: f64::NAN,
                zero_crossing: f64::NAN,
                spectral_diff: f64::NAN,
            },
            agreeing_components: 0,
        };
    }

    // Uniform grid spacing assumed.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);
    let signal_range = argvals[n - 1] - argvals[0];

    // Plausible period band: at least 3 samples per cycle, at most 2 cycles
    // over the record.
    let min_period = signal_range / (n as f64 / 3.0);
    let max_period = signal_range / 2.0;

    // Run the five component estimators.
    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);

    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);

    let acf_average_period = sazed_acf_average(data, dt, max_lag);

    let (zero_crossing_period, zero_crossing_conf) =
        sazed_zero_crossing_with_confidence(data, dt, max_lag);

    let (spectral_diff_period, spectral_diff_conf) =
        sazed_spectral_diff_with_confidence(data, argvals);

    let components = SazedComponents {
        spectral: spectral_period,
        acf_peak: acf_peak_period,
        acf_average: acf_average_period,
        zero_crossing: zero_crossing_period,
        spectral_diff: spectral_diff_period,
    };

    // Per-component confidence gates. The confidences live on different
    // scales (power ratios vs. correlations vs. consistency), hence the
    // different magnitudes; presumably tuned empirically — TODO confirm.
    let spectral_thresh = 8.0;
    let acf_thresh = 0.3;
    let zero_crossing_thresh = 0.9;
    let spectral_diff_thresh = 6.0;
    let min_confident_components = 2;

    // Collect estimates that are in-range and pass their gate.
    let mut confident_periods: Vec<f64> = Vec::new();

    if spectral_period.is_finite()
        && spectral_period > min_period
        && spectral_period < max_period
        && spectral_conf > spectral_thresh
    {
        confident_periods.push(spectral_period);
    }

    if acf_peak_period.is_finite()
        && acf_peak_period > min_period
        && acf_peak_period < max_period
        && acf_peak_conf > acf_thresh
    {
        confident_periods.push(acf_peak_period);
    }

    // NOTE(review): this gate reuses `acf_peak_conf` — `sazed_acf_average`
    // returns no confidence of its own, so the peak confidence serves as a
    // proxy here; confirm that is intentional.
    if acf_average_period.is_finite()
        && acf_average_period > min_period
        && acf_average_period < max_period
        && acf_peak_conf > acf_thresh
    {
        confident_periods.push(acf_average_period);
    }

    if zero_crossing_period.is_finite()
        && zero_crossing_period > min_period
        && zero_crossing_period < max_period
        && zero_crossing_conf > zero_crossing_thresh
    {
        confident_periods.push(zero_crossing_period);
    }

    if spectral_diff_period.is_finite()
        && spectral_diff_period > min_period
        && spectral_diff_period < max_period
        && spectral_diff_conf > spectral_diff_thresh
    {
        confident_periods.push(spectral_diff_period);
    }

    // Not enough confident components: no consensus possible.
    if confident_periods.len() < min_confident_components {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: components,
            agreeing_components: confident_periods.len(),
        };
    }

    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
    // Confidence is measured against all 5 components, not just confident ones.
    let confidence = agreeing_count as f64 / 5.0;

    SazedResult {
        period: consensus_period,
        confidence,
        component_periods: components,
        agreeing_components: agreeing_count,
    }
}
2540
2541pub fn sazed_fdata(
2552 data: &[f64],
2553 n: usize,
2554 m: usize,
2555 argvals: &[f64],
2556 tolerance: Option<f64>,
2557) -> SazedResult {
2558 if n == 0 || m < 8 || argvals.len() != m {
2559 return SazedResult {
2560 period: f64::NAN,
2561 confidence: 0.0,
2562 component_periods: SazedComponents {
2563 spectral: f64::NAN,
2564 acf_peak: f64::NAN,
2565 acf_average: f64::NAN,
2566 zero_crossing: f64::NAN,
2567 spectral_diff: f64::NAN,
2568 },
2569 agreeing_components: 0,
2570 };
2571 }
2572
2573 let mean_curve = compute_mean_curve(data, n, m);
2574 sazed(&mean_curve, argvals, tolerance)
2575}
2576
2577fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2579 let (frequencies, power) = periodogram(data, argvals);
2580
2581 if frequencies.len() < 3 {
2582 return (f64::NAN, 0.0);
2583 }
2584
2585 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2587
2588 if power_no_dc.is_empty() {
2589 return (f64::NAN, 0.0);
2590 }
2591
2592 let mut sorted_power = power_no_dc.clone();
2594 sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2595 let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2596
2597 let mut max_idx = 0;
2599 let mut max_val = 0.0;
2600 for (i, &p) in power_no_dc.iter().enumerate() {
2601 if p > max_val {
2602 max_val = p;
2603 max_idx = i;
2604 }
2605 }
2606
2607 let power_ratio = max_val / noise_floor;
2608 let freq = frequencies[max_idx + 1];
2609
2610 if freq > 1e-15 {
2611 (1.0 / freq, power_ratio)
2612 } else {
2613 (f64::NAN, 0.0)
2614 }
2615}
2616
2617fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2619 let acf = autocorrelation(data, max_lag);
2620
2621 if acf.len() < 4 {
2622 return (f64::NAN, 0.0);
2623 }
2624
2625 let mut min_search_start = 1;
2628 for i in 1..acf.len() {
2629 if acf[i] < 0.0 {
2630 min_search_start = i;
2631 break;
2632 }
2633 if i > 1 && acf[i] > acf[i - 1] {
2634 min_search_start = i - 1;
2635 break;
2636 }
2637 }
2638
2639 let peaks = find_peaks_1d(&acf[min_search_start..], 1);
2641
2642 if peaks.is_empty() {
2643 return (f64::NAN, 0.0);
2644 }
2645
2646 let peak_lag = peaks[0] + min_search_start;
2647 let acf_value = acf[peak_lag].max(0.0); (peak_lag as f64 * dt, acf_value)
2650}
2651
2652fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
2654 let acf = autocorrelation(data, max_lag);
2655
2656 if acf.len() < 4 {
2657 return f64::NAN;
2658 }
2659
2660 let peaks = find_peaks_1d(&acf[1..], 1);
2662
2663 if peaks.is_empty() {
2664 return f64::NAN;
2665 }
2666
2667 let mut weighted_sum = 0.0;
2669 let mut weight_sum = 0.0;
2670
2671 for (i, &peak_idx) in peaks.iter().enumerate() {
2672 let lag = peak_idx + 1;
2673 let weight = acf[lag].max(0.0);
2674
2675 if i == 0 {
2676 weighted_sum += lag as f64 * weight;
2678 weight_sum += weight;
2679 } else {
2680 let expected_fundamental = peaks[0] + 1;
2682 let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
2683 if harmonic > 0 {
2684 let fundamental_est = lag as f64 / harmonic as f64;
2685 weighted_sum += fundamental_est * weight;
2686 weight_sum += weight;
2687 }
2688 }
2689 }
2690
2691 if weight_sum > 1e-15 {
2692 weighted_sum / weight_sum * dt
2693 } else {
2694 f64::NAN
2695 }
2696}
2697
2698fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2701 let acf = autocorrelation(data, max_lag);
2702
2703 if acf.len() < 4 {
2704 return (f64::NAN, 0.0);
2705 }
2706
2707 let mut crossings = Vec::new();
2709 for i in 1..acf.len() {
2710 if acf[i - 1] * acf[i] < 0.0 {
2711 let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
2713 crossings.push((i - 1) as f64 + frac);
2714 }
2715 }
2716
2717 if crossings.len() < 2 {
2718 return (f64::NAN, 0.0);
2719 }
2720
2721 let mut half_periods = Vec::new();
2724 for i in 1..crossings.len() {
2725 half_periods.push(crossings[i] - crossings[i - 1]);
2726 }
2727
2728 if half_periods.is_empty() {
2729 return (f64::NAN, 0.0);
2730 }
2731
2732 let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
2735 let variance: f64 =
2736 half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
2737 let std = variance.sqrt();
2738 let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);
2739
2740 half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2742 let median_half = half_periods[half_periods.len() / 2];
2743
2744 (2.0 * median_half * dt, consistency)
2745}
2746
2747fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2749 if data.len() < 4 {
2750 return (f64::NAN, 0.0);
2751 }
2752
2753 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2755 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2756
2757 sazed_spectral_with_confidence(&diff, &diff_argvals)
2758}
2759
/// Indices of local maxima in a power spectrum, strongest first.
///
/// A bin counts as a peak when it strictly exceeds both neighbors and lies
/// above twice the median power (the estimated noise floor). Endpoints can
/// never be peaks; spectra shorter than 3 bins yield no peaks.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    if power.len() < 3 {
        return Vec::new();
    }

    // Noise floor = median power; peaks must clear 2x that level.
    let mut sorted = power.to_vec();
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let cutoff = 2.0 * sorted[sorted.len() / 2];

    let mut found: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > cutoff)
        .map(|i| (i, power[i]))
        .collect();

    // Strongest first.
    found.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    found.into_iter().map(|(idx, _)| idx).collect()
}
2785
/// Find the period the most estimates agree on.
///
/// For every candidate anchor, collects the periods within `tolerance`
/// (relative difference) of it and averages them; the averaged period with
/// the largest support wins, ties broken toward the larger average. Returns
/// `(NaN, 0)` for an empty slice and the sole value for a singleton.
fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
    if periods.is_empty() {
        return (f64::NAN, 0);
    }
    if periods.len() == 1 {
        return (periods[0], 1);
    }

    let mut best_period = periods[0];
    let mut best_count = 0usize;
    let mut best_sum = 0.0;

    for &anchor in periods {
        // All periods agreeing with this anchor.
        let agreeing: Vec<f64> = periods
            .iter()
            .copied()
            .filter(|&p| (anchor - p).abs() / anchor.max(p) <= tolerance)
            .collect();
        let count = agreeing.len();
        let sum: f64 = agreeing.iter().sum();

        let better = count > best_count
            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64);
        if better {
            best_count = count;
            best_period = sum / count as f64;
            best_sum = sum;
        }
    }

    (best_period, best_count)
}
2823
/// Result of the autoperiod algorithm (FFT candidates validated by the ACF).
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Best period estimate (NaN when detection failed).
    pub period: f64,
    /// Combined score (normalized FFT power * ACF score) of the best candidate.
    pub confidence: f64,
    /// Raw periodogram power at the best candidate's spectral peak.
    pub fft_power: f64,
    /// ACF validation score of the best candidate.
    pub acf_validation: f64,
    /// All evaluated candidates.
    pub candidates: Vec<AutoperiodCandidate>,
}
2838
/// One candidate period evaluated by `autoperiod`.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Candidate period after gradient refinement against the ACF.
    pub period: f64,
    /// Raw periodogram power at the originating spectral peak.
    pub fft_power: f64,
    /// ACF validation score of the refined period.
    pub acf_score: f64,
    /// normalized_power * acf_score; used to rank candidates.
    pub combined_score: f64,
}
2851
2852pub fn autoperiod(
2873 data: &[f64],
2874 argvals: &[f64],
2875 n_candidates: Option<usize>,
2876 gradient_steps: Option<usize>,
2877) -> AutoperiodResult {
2878 let n = data.len();
2879 let max_candidates = n_candidates.unwrap_or(5);
2880 let steps = gradient_steps.unwrap_or(10);
2881
2882 if n < 8 || argvals.len() != n {
2883 return AutoperiodResult {
2884 period: f64::NAN,
2885 confidence: 0.0,
2886 fft_power: 0.0,
2887 acf_validation: 0.0,
2888 candidates: Vec::new(),
2889 };
2890 }
2891
2892 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
2893 let max_lag = (n / 2).max(4);
2894
2895 let (frequencies, power) = periodogram(data, argvals);
2897
2898 if frequencies.len() < 3 {
2899 return AutoperiodResult {
2900 period: f64::NAN,
2901 confidence: 0.0,
2902 fft_power: 0.0,
2903 acf_validation: 0.0,
2904 candidates: Vec::new(),
2905 };
2906 }
2907
2908 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2910 let peak_indices = find_spectral_peaks(&power_no_dc);
2911
2912 if peak_indices.is_empty() {
2913 return AutoperiodResult {
2914 period: f64::NAN,
2915 confidence: 0.0,
2916 fft_power: 0.0,
2917 acf_validation: 0.0,
2918 candidates: Vec::new(),
2919 };
2920 }
2921
2922 let acf = autocorrelation(data, max_lag);
2924
2925 let mut candidates: Vec<AutoperiodCandidate> = Vec::new();
2927 let total_power: f64 = power_no_dc.iter().sum();
2928
2929 for &peak_idx in peak_indices.iter().take(max_candidates) {
2930 let freq = frequencies[peak_idx + 1];
2931 if freq < 1e-15 {
2932 continue;
2933 }
2934
2935 let initial_period = 1.0 / freq;
2936 let fft_power = power_no_dc[peak_idx];
2937 let normalized_power = fft_power / total_power.max(1e-15);
2938
2939 let refined_period = refine_period_gradient(&acf, initial_period, dt, steps);
2941
2942 let refined_acf_score = validate_period_acf(&acf, refined_period, dt);
2944
2945 let combined_score = normalized_power * refined_acf_score;
2946
2947 candidates.push(AutoperiodCandidate {
2948 period: refined_period,
2949 fft_power,
2950 acf_score: refined_acf_score,
2951 combined_score,
2952 });
2953 }
2954
2955 if candidates.is_empty() {
2956 return AutoperiodResult {
2957 period: f64::NAN,
2958 confidence: 0.0,
2959 fft_power: 0.0,
2960 acf_validation: 0.0,
2961 candidates: Vec::new(),
2962 };
2963 }
2964
2965 let best = candidates
2967 .iter()
2968 .max_by(|a, b| {
2969 a.combined_score
2970 .partial_cmp(&b.combined_score)
2971 .unwrap_or(std::cmp::Ordering::Equal)
2972 })
2973 .unwrap();
2974
2975 AutoperiodResult {
2976 period: best.period,
2977 confidence: best.combined_score,
2978 fft_power: best.fft_power,
2979 acf_validation: best.acf_score,
2980 candidates,
2981 }
2982}
2983
2984pub fn autoperiod_fdata(
2986 data: &[f64],
2987 n: usize,
2988 m: usize,
2989 argvals: &[f64],
2990 n_candidates: Option<usize>,
2991 gradient_steps: Option<usize>,
2992) -> AutoperiodResult {
2993 if n == 0 || m < 8 || argvals.len() != m {
2994 return AutoperiodResult {
2995 period: f64::NAN,
2996 confidence: 0.0,
2997 fft_power: 0.0,
2998 acf_validation: 0.0,
2999 candidates: Vec::new(),
3000 };
3001 }
3002
3003 let mean_curve = compute_mean_curve(data, n, m);
3004 autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3005}
3006
/// Score how well `period` is supported by the autocorrelation function.
///
/// The base score is the (clamped) ACF at the corresponding lag. It is
/// halved when the half-period lag is nearly as strong (suggesting the true
/// period is half), boosted by 20% when the double lag is also clearly
/// positive, and capped at 1.0. Out-of-range lags score 0.
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }

    let base = acf[lag];
    let mut score = base.max(0.0);

    // Penalty: a comparably strong half-period lag casts doubt on `period`.
    let half = lag / 2;
    if half > 0 && half < acf.len() && acf[half] > base * 0.7 {
        score *= 0.5;
    }

    // Bonus: support at the double lag reinforces the period.
    if let Some(&double) = acf.get(lag * 2) {
        if double > 0.3 {
            score *= 1.2;
        }
    }

    score.min(1.0)
}
3045
3046fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3048 let mut period = initial_period;
3049 let step_size = dt * 0.5; for _ in 0..steps {
3052 let current_score = validate_period_acf(acf, period, dt);
3053 let left_score = validate_period_acf(acf, period - step_size, dt);
3054 let right_score = validate_period_acf(acf, period + step_size, dt);
3055
3056 if left_score > current_score && left_score > right_score {
3057 period -= step_size;
3058 } else if right_score > current_score {
3059 period += step_size;
3060 }
3061 }
3063
3064 period.max(dt) }
3066
/// Result of CFD-autoperiod (clustered frequency detection on the
/// differenced signal).
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Best period estimate (NaN when detection failed).
    pub period: f64,
    /// Confidence of the best period (ACF score * clustered power).
    pub confidence: f64,
    /// ACF validation score of the best period (0.0 when the unvalidated
    /// fallback cluster was used).
    pub acf_validation: f64,
    /// All validated candidate periods, best first.
    pub periods: Vec<f64>,
    /// Confidence for each entry of `periods`.
    pub confidences: Vec<f64>,
}
3081
3082pub fn cfd_autoperiod(
3103 data: &[f64],
3104 argvals: &[f64],
3105 cluster_tolerance: Option<f64>,
3106 min_cluster_size: Option<usize>,
3107) -> CfdAutoperiodResult {
3108 let n = data.len();
3109 let tol = cluster_tolerance.unwrap_or(0.1);
3110 let min_size = min_cluster_size.unwrap_or(1);
3111
3112 if n < 8 || argvals.len() != n {
3113 return CfdAutoperiodResult {
3114 period: f64::NAN,
3115 confidence: 0.0,
3116 acf_validation: 0.0,
3117 periods: Vec::new(),
3118 confidences: Vec::new(),
3119 };
3120 }
3121
3122 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3123 let max_lag = (n / 2).max(4);
3124
3125 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3127 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3128
3129 let (frequencies, power) = periodogram(&diff, &diff_argvals);
3131
3132 if frequencies.len() < 3 {
3133 return CfdAutoperiodResult {
3134 period: f64::NAN,
3135 confidence: 0.0,
3136 acf_validation: 0.0,
3137 periods: Vec::new(),
3138 confidences: Vec::new(),
3139 };
3140 }
3141
3142 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3144 let peak_indices = find_spectral_peaks(&power_no_dc);
3145
3146 if peak_indices.is_empty() {
3147 return CfdAutoperiodResult {
3148 period: f64::NAN,
3149 confidence: 0.0,
3150 acf_validation: 0.0,
3151 periods: Vec::new(),
3152 confidences: Vec::new(),
3153 };
3154 }
3155
3156 let total_power: f64 = power_no_dc.iter().sum();
3158 let mut candidates: Vec<(f64, f64)> = Vec::new(); for &peak_idx in &peak_indices {
3161 let freq = frequencies[peak_idx + 1];
3162 if freq > 1e-15 {
3163 let period = 1.0 / freq;
3164 let normalized_power = power_no_dc[peak_idx] / total_power.max(1e-15);
3165 candidates.push((period, normalized_power));
3166 }
3167 }
3168
3169 if candidates.is_empty() {
3170 return CfdAutoperiodResult {
3171 period: f64::NAN,
3172 confidence: 0.0,
3173 acf_validation: 0.0,
3174 periods: Vec::new(),
3175 confidences: Vec::new(),
3176 };
3177 }
3178
3179 let clusters = cluster_periods(&candidates, tol, min_size);
3181
3182 if clusters.is_empty() {
3183 return CfdAutoperiodResult {
3184 period: f64::NAN,
3185 confidence: 0.0,
3186 acf_validation: 0.0,
3187 periods: Vec::new(),
3188 confidences: Vec::new(),
3189 };
3190 }
3191
3192 let acf = autocorrelation(data, max_lag);
3194
3195 let mut validated: Vec<(f64, f64, f64)> = Vec::new(); for (center, power_sum) in clusters {
3198 let acf_score = validate_period_acf(&acf, center, dt);
3199 if acf_score > 0.1 {
3200 validated.push((center, acf_score, power_sum));
3202 }
3203 }
3204
3205 if validated.is_empty() {
3206 let (center, power_sum) = cluster_periods(&candidates, tol, min_size)
3208 .into_iter()
3209 .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3210 .unwrap();
3211 return CfdAutoperiodResult {
3212 period: center,
3213 confidence: power_sum,
3214 acf_validation: 0.0,
3215 periods: vec![center],
3216 confidences: vec![power_sum],
3217 };
3218 }
3219
3220 validated.sort_by(|a, b| {
3222 let score_a = a.1 * a.2;
3223 let score_b = b.1 * b.2;
3224 score_b
3225 .partial_cmp(&score_a)
3226 .unwrap_or(std::cmp::Ordering::Equal)
3227 });
3228
3229 let periods: Vec<f64> = validated.iter().map(|v| v.0).collect();
3230 let confidences: Vec<f64> = validated.iter().map(|v| v.1 * v.2).collect();
3231
3232 CfdAutoperiodResult {
3233 period: validated[0].0,
3234 confidence: validated[0].1 * validated[0].2,
3235 acf_validation: validated[0].1,
3236 periods,
3237 confidences,
3238 }
3239}
3240
3241pub fn cfd_autoperiod_fdata(
3243 data: &[f64],
3244 n: usize,
3245 m: usize,
3246 argvals: &[f64],
3247 cluster_tolerance: Option<f64>,
3248 min_cluster_size: Option<usize>,
3249) -> CfdAutoperiodResult {
3250 if n == 0 || m < 8 || argvals.len() != m {
3251 return CfdAutoperiodResult {
3252 period: f64::NAN,
3253 confidence: 0.0,
3254 acf_validation: 0.0,
3255 periods: Vec::new(),
3256 confidences: Vec::new(),
3257 };
3258 }
3259
3260 let mean_curve = compute_mean_curve(data, n, m);
3261 cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3262}
3263
/// Group nearby period candidates into clusters.
///
/// Candidates are sorted by period and greedily merged: a candidate joins the
/// current cluster when its relative distance to the cluster's running mean
/// period is within `tolerance`. Clusters with fewer than `min_size` members
/// are dropped. Each surviving cluster is reported as
/// (power-weighted centroid period, total power).
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    // Power-weighted centroid and total power of a finished cluster.
    // (Previously this computation was duplicated at two call sites.)
    fn finalize(cluster: &[(f64, f64)]) -> (f64, f64) {
        let total_power: f64 = cluster.iter().map(|(_, pw)| pw).sum();
        let center =
            cluster.iter().map(|(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
        (center, total_power)
    }

    if candidates.is_empty() {
        return Vec::new();
    }

    // Sort by period so clusters form contiguous runs.
    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in sorted.iter().skip(1) {
        // Unweighted mean of the periods currently in the cluster.
        let center = current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        // Relative difference, normalized by the larger of the two periods.
        let rel_diff = (period - center).abs() / center.max(period);

        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            if current.len() >= min_size {
                clusters.push(finalize(&current));
            }
            current = vec![(period, power)];
        }
    }

    // Flush the trailing cluster.
    if current.len() >= min_size {
        clusters.push(finalize(&current));
    }

    clusters
}
3316
/// Output of a Lomb–Scargle periodogram analysis.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Frequencies at which the periodogram was evaluated.
    pub frequencies: Vec<f64>,
    /// Periods corresponding to `frequencies` (1/f; infinity when f == 0).
    pub periods: Vec<f64>,
    /// Normalized Lomb–Scargle power at each frequency.
    pub power: Vec<f64>,
    /// Period of the highest-power peak.
    pub peak_period: f64,
    /// Frequency of the highest-power peak.
    pub peak_frequency: f64,
    /// Power of the highest-power peak.
    pub peak_power: f64,
    /// Estimated false-alarm probability of the peak.
    pub false_alarm_probability: f64,
    /// 1 - `false_alarm_probability`.
    pub significance: f64,
}
3341
3342pub fn lomb_scargle(
3382 times: &[f64],
3383 values: &[f64],
3384 frequencies: Option<&[f64]>,
3385 oversampling: Option<f64>,
3386 nyquist_factor: Option<f64>,
3387) -> LombScargleResult {
3388 let n = times.len();
3389 assert_eq!(n, values.len(), "times and values must have same length");
3390 assert!(n >= 3, "Need at least 3 data points");
3391
3392 let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3394 let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3395
3396 let freq_vec: Vec<f64>;
3398 let freqs = if let Some(f) = frequencies {
3399 f
3400 } else {
3401 freq_vec = generate_ls_frequencies(
3402 times,
3403 oversampling.unwrap_or(4.0),
3404 nyquist_factor.unwrap_or(1.0),
3405 );
3406 &freq_vec
3407 };
3408
3409 let mut power = Vec::with_capacity(freqs.len());
3411
3412 for &freq in freqs.iter() {
3413 let omega = 2.0 * PI * freq;
3414 let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3415 power.push(p);
3416 }
3417
3418 let (peak_idx, &peak_power) = power
3420 .iter()
3421 .enumerate()
3422 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
3423 .unwrap_or((0, &0.0));
3424
3425 let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3426 let peak_period = if peak_frequency > 0.0 {
3427 1.0 / peak_frequency
3428 } else {
3429 f64::INFINITY
3430 };
3431
3432 let n_indep = estimate_independent_frequencies(times, freqs.len());
3434 let fap = lomb_scargle_fap(peak_power, n_indep, n);
3435
3436 let periods: Vec<f64> = freqs
3438 .iter()
3439 .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3440 .collect();
3441
3442 LombScargleResult {
3443 frequencies: freqs.to_vec(),
3444 periods,
3445 power,
3446 peak_period,
3447 peak_frequency,
3448 peak_power,
3449 false_alarm_probability: fap,
3450 significance: 1.0 - fap,
3451 }
3452}
3453
/// Normalized Lomb–Scargle power at a single angular frequency `omega`.
///
/// Returns 0.0 for a constant signal (`var_y <= 0`) or a non-positive
/// frequency.
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    // Degenerate cases carry no power.
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    // Time offset tau chosen so the sine and cosine basis terms are
    // orthogonal (the classic Lomb–Scargle construction).
    let (sin_sum, cos_sum) = times.iter().fold((0.0, 0.0), |(s, c), &t| {
        let arg = 2.0 * omega * t;
        (s + arg.sin(), c + arg.cos())
    });
    let tau = sin_sum.atan2(cos_sum) / (2.0 * omega);

    // Projections of the centered data onto cos/sin, plus the basis norms.
    let mut sc = 0.0;
    let mut ss = 0.0;
    let mut css = 0.0;
    let mut sss = 0.0;
    for i in 0..times.len() {
        let yc = values[i] - mean_y;
        let arg = omega * (times[i] - tau);
        let c = arg.cos();
        let s = arg.sin();
        sc += yc * c;
        ss += yc * s;
        css += c * c;
        sss += s * s;
    }

    // Floor the denominators to avoid division by zero.
    0.5 * (sc * sc / css.max(1e-15) + ss * ss / sss.max(1e-15)) / var_y
}
3505
/// Build a default frequency grid for the Lomb–Scargle periodogram.
///
/// The grid spans from 1/T (T = observed time span) up to `nyquist_factor`
/// times the pseudo-Nyquist frequency, with spacing refined by `oversampling`.
/// The grid size is capped at 10000 points.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    if times.len() < 2 {
        return vec![0.0];
    }

    // Observed time span, floored to avoid a zero-length span.
    let (t_min, t_max) = times
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &t| {
            (lo.min(t), hi.max(t))
        });
    let t_span = (t_max - t_min).max(1e-10);

    // Lowest resolvable frequency: one cycle across the whole span.
    let f_min = 1.0 / t_span;
    // Pseudo-Nyquist frequency for n samples over the span.
    let f_nyquist = 0.5 * (times.len() - 1) as f64 / t_span;
    let f_max = f_nyquist * nyquist_factor;

    // Oversampled spacing; cap the grid size to bound the cost.
    let df = f_min / oversampling;
    let n_freq = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);

    (0..n_freq).map(|i| f_min + i as f64 * df).collect()
}
3539
/// Effective number of independent frequencies, used by the false-alarm
/// probability. Conservative estimate: the smaller of the sample count and
/// the number of scanned frequencies.
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    times.len().min(n_freq)
}
3549
/// False-alarm probability of a peak of normalized power `power`, assuming
/// `n_indep` independent frequencies: FAP = 1 - (1 - e^{-P})^{n_indep}.
///
/// Returns 1.0 (no significance) for non-positive power or zero independent
/// frequencies, and 0.0 when the single-frequency tail probability saturates.
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // Probability that a single frequency reaches `power` by chance.
    let prob_single = 1.0 - (-power).exp();

    // Saturated cases: avoid ln(1)/ln(0) below.
    if prob_single >= 1.0 {
        return 0.0;
    }
    if prob_single <= 0.0 {
        return 1.0;
    }

    // Work in log space so prob_single^n_indep does not underflow.
    let log_cdf = n_indep as f64 * prob_single.ln();

    if log_cdf < -700.0 {
        // CDF = (1 - e^{-P})^{n_indep} underflows to 0, i.e. the peak is
        // essentially guaranteed to be exceeded by chance, so the
        // false-alarm probability saturates at 1. (Returning 0.0 here — as
        // the previous version did — inverts the significance for weak
        // peaks scanned over many frequencies.)
        1.0
    } else {
        1.0 - log_cdf.exp()
    }
}
3585
3586pub fn lomb_scargle_fdata(
3602 data: &[f64],
3603 n: usize,
3604 m: usize,
3605 argvals: &[f64],
3606 oversampling: Option<f64>,
3607 nyquist_factor: Option<f64>,
3608) -> LombScargleResult {
3609 let mean_curve = compute_mean_curve(data, n, m);
3611
3612 lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
3614}
3615
/// Output of the STOMP-based matrix profile computation.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// Z-normalized Euclidean distance from each subsequence to its nearest
    /// neighbor (outside the exclusion zone).
    pub profile: Vec<f64>,
    /// Index of the nearest-neighbor subsequence for each position.
    pub profile_index: Vec<usize>,
    /// Subsequence (window) length used, in samples.
    pub subsequence_length: usize,
    /// Candidate periods (in samples), ranked by arc-count peak height.
    pub detected_periods: Vec<f64>,
    /// Histogram of nearest-neighbor arc lengths |i - profile_index[i]|.
    pub arc_counts: Vec<usize>,
    /// Most common arc length, interpreted as the dominant period in samples
    /// (0.0 when no peak was found).
    pub primary_period: f64,
    /// Fraction of arcs supporting the primary period, in [0, 1].
    pub confidence: f64,
}
3638
3639pub fn matrix_profile(
3675 values: &[f64],
3676 subsequence_length: Option<usize>,
3677 exclusion_zone: Option<f64>,
3678) -> MatrixProfileResult {
3679 let n = values.len();
3680
3681 let m = subsequence_length.unwrap_or_else(|| {
3683 let default_m = n / 4;
3684 default_m.max(4).min(n / 2)
3685 });
3686
3687 assert!(m >= 3, "Subsequence length must be at least 3");
3688 assert!(
3689 m <= n / 2,
3690 "Subsequence length must be at most half the series length"
3691 );
3692
3693 let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3694 let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3695
3696 let profile_len = n - m + 1;
3698
3699 let (means, stds) = compute_sliding_stats(values, m);
3701
3702 let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3704
3705 let (arc_counts, detected_periods, primary_period, confidence) =
3707 analyze_arcs(&profile_index, profile_len, m);
3708
3709 MatrixProfileResult {
3710 profile,
3711 profile_index,
3712 subsequence_length: m,
3713 detected_periods,
3714 arc_counts,
3715 primary_period,
3716 confidence,
3717 }
3718}
3719
/// Mean and standard deviation of every length-`m` sliding window of
/// `values`, computed in O(n) via prefix sums.
///
/// Standard deviations are floored at 1e-10 so later z-normalization never
/// divides by zero.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let profile_len = n - m + 1;

    // Prefix sums of the values and their squares.
    let mut cumsum = vec![0.0; n + 1];
    let mut cumsum_sq = vec![0.0; n + 1];
    for (i, &v) in values.iter().enumerate() {
        cumsum[i + 1] = cumsum[i] + v;
        cumsum_sq[i + 1] = cumsum_sq[i] + v * v;
    }

    let m_f64 = m as f64;
    let mut means = Vec::with_capacity(profile_len);
    let mut stds = Vec::with_capacity(profile_len);

    for i in 0..profile_len {
        let sum = cumsum[i + m] - cumsum[i];
        let sum_sq = cumsum_sq[i + m] - cumsum_sq[i];

        let mean = sum / m_f64;
        // E[x^2] - mean^2, clamped at 0 to absorb rounding error.
        let variance = sum_sq / m_f64 - mean * mean;

        means.push(mean);
        stds.push(variance.max(0.0).sqrt().max(1e-10));
    }

    (means, stds)
}
3756
3757fn stomp_core(
3761 values: &[f64],
3762 m: usize,
3763 means: &[f64],
3764 stds: &[f64],
3765 exclusion_radius: usize,
3766) -> (Vec<f64>, Vec<usize>) {
3767 let n = values.len();
3768 let profile_len = n - m + 1;
3769
3770 let mut profile = vec![f64::INFINITY; profile_len];
3772 let mut profile_index = vec![0usize; profile_len];
3773
3774 let mut qt = vec![0.0; profile_len];
3777
3778 for j in 0..profile_len {
3780 let mut dot = 0.0;
3781 for k in 0..m {
3782 dot += values[k] * values[j + k];
3783 }
3784 qt[j] = dot;
3785 }
3786
3787 update_profile_row(
3789 0,
3790 &qt,
3791 means,
3792 stds,
3793 m,
3794 exclusion_radius,
3795 &mut profile,
3796 &mut profile_index,
3797 );
3798
3799 for i in 1..profile_len {
3801 let mut qt_new = vec![0.0; profile_len];
3804
3805 let mut dot = 0.0;
3807 for k in 0..m {
3808 dot += values[i + k] * values[k];
3809 }
3810 qt_new[0] = dot;
3811
3812 for j in 1..profile_len {
3814 qt_new[j] =
3815 qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3816 }
3817
3818 qt = qt_new;
3819
3820 update_profile_row(
3822 i,
3823 &qt,
3824 means,
3825 stds,
3826 m,
3827 exclusion_radius,
3828 &mut profile,
3829 &mut profile_index,
3830 );
3831 }
3832
3833 (profile, profile_index)
3834}
3835
/// Fold row `i` of the distance matrix (given its dot products `qt`) into the
/// running matrix profile, updating both endpoints of each arc and skipping
/// positions within `exclusion_radius` of the diagonal.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let profile_len = profile.len();
    let m_f64 = m as f64;

    for j in 0..profile_len {
        // Neighbors too close to the diagonal are trivial matches; skip them.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        // Pearson correlation between windows i and j, from the dot product.
        let numerator = qt[j] - m_f64 * means[i] * means[j];
        let denominator = m_f64 * stds[i] * stds[j];
        let pearson = if denominator > 0.0 {
            (numerator / denominator).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        // Z-normalized Euclidean distance: d^2 = 2m(1 - r).
        let dist = (2.0 * m_f64 * (1.0 - pearson)).max(0.0).sqrt();

        // The arc (i, j) may improve the best-so-far at either endpoint.
        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
3883
/// Derive candidate periods from the matrix-profile index.
///
/// Builds a histogram of arc lengths |i - profile_index[i]|; local maxima of
/// that histogram with at least 3 supporting arcs, at distances of at least
/// m/2, become candidate periods (in samples). Returns
/// (arc_counts, top-5 periods, primary period, confidence).
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    // Histogram of nearest-neighbor arc lengths.
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    // Local maxima above m/2 (sub-window distances cannot be real periods).
    let min_period = m / 2;
    let mut peaks: Vec<(usize, usize)> = Vec::new();
    for i in min_period..arc_counts.len().saturating_sub(1) {
        let left = arc_counts[i.saturating_sub(1)];
        let right = arc_counts[(i + 1).min(arc_counts.len() - 1)];
        if arc_counts[i] > left && arc_counts[i] > right && arc_counts[i] >= 3 {
            peaks.push((i, arc_counts[i]));
        }
    }

    // Strongest peaks first; report up to five as candidate periods.
    peaks.sort_by(|a, b| b.1.cmp(&a.1));
    let detected_periods: Vec<f64> = peaks.iter().take(5).map(|&(p, _)| p as f64).collect();

    // Confidence: share of valid-distance arcs supporting the top peak.
    let (primary_period, confidence) = match peaks.first() {
        Some(&(period, count)) => {
            let total_arcs: usize = arc_counts[min_period..].iter().sum();
            let conf = if total_arcs > 0 {
                count as f64 / total_arcs as f64
            } else {
                0.0
            };
            (period as f64, conf.min(1.0))
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
3941
3942pub fn matrix_profile_fdata(
3956 data: &[f64],
3957 n: usize,
3958 m: usize,
3959 subsequence_length: Option<usize>,
3960 exclusion_zone: Option<f64>,
3961) -> MatrixProfileResult {
3962 let mean_curve = compute_mean_curve(data, n, m);
3964
3965 matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
3967}
3968
3969pub fn matrix_profile_seasonality(
3981 values: &[f64],
3982 subsequence_length: Option<usize>,
3983 confidence_threshold: Option<f64>,
3984) -> (bool, f64, f64) {
3985 let result = matrix_profile(values, subsequence_length, None);
3986
3987 let threshold = confidence_threshold.unwrap_or(0.1);
3988 let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
3989
3990 (is_seasonal, result.primary_period, result.confidence)
3991}
3992
/// Output of singular spectrum analysis (SSA) decomposition.
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component (same length as the input series).
    pub trend: Vec<f64>,
    /// Reconstructed seasonal/oscillatory component.
    pub seasonal: Vec<f64>,
    /// Residual: input minus trend minus seasonal.
    pub noise: Vec<f64>,
    /// Leading singular values of the trajectory matrix (up to `n_components`).
    pub singular_values: Vec<f64>,
    /// Fraction of total variance explained by each retained component.
    pub contributions: Vec<f64>,
    /// Embedding window length L used to build the trajectory matrix.
    pub window_length: usize,
    /// Number of components retained.
    pub n_components: usize,
    /// Period (in samples of the singular vector) of the first automatically
    /// detected seasonal component; 0.0 when none was detected or when
    /// components were grouped manually.
    pub detected_period: f64,
    /// Relative strength of the detected seasonal component (ratio of its
    /// singular value to the leading one); 0.0 when none detected.
    pub confidence: f64,
}
4019
/// Singular spectrum analysis: decompose `values` into trend + seasonal +
/// noise.
///
/// The series is embedded into an L x K trajectory matrix (K = n - L + 1),
/// decomposed via SVD, and components are grouped into trend/seasonal either
/// from the caller-supplied index lists or automatically
/// (`auto_group_ssa_components`). Each group is reconstructed by diagonal
/// averaging; the residual becomes `noise`. Inputs too short for the chosen
/// window return the series itself as trend with empty spectra.
pub fn ssa(
    values: &[f64],
    window_length: Option<usize>,
    n_components: Option<usize>,
    trend_components: Option<&[usize]>,
    seasonal_components: Option<&[usize]>,
) -> SsaResult {
    let n = values.len();

    // Default embedding window: half the series, clamped to [2, 50].
    let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));

    if n < 4 || l < 2 || l > n / 2 {
        // Degenerate input: pass the series through as "trend".
        return SsaResult {
            trend: values.to_vec(),
            seasonal: vec![0.0; n],
            noise: vec![0.0; n],
            singular_values: vec![],
            contributions: vec![],
            window_length: l,
            n_components: 0,
            detected_period: 0.0,
            confidence: 0.0,
        };
    }

    let k = n - l + 1;

    // L x K Hankel trajectory matrix and its SVD.
    let trajectory = embed_trajectory(values, l, k);

    let (u, sigma, vt) = svd_decompose(&trajectory, l, k);

    let max_components = sigma.len();
    let n_comp = n_components.unwrap_or(10).min(max_components);

    // Variance share of each retained component: sigma_i^2 / sum sigma^2.
    let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
    let contributions: Vec<f64> = sigma
        .iter()
        .take(n_comp)
        .map(|&s| s * s / total_var.max(1e-15))
        .collect();

    // Component grouping: manual when either index list is supplied (period
    // and confidence are then not estimated), automatic otherwise.
    let (trend_idx, seasonal_idx, detected_period, confidence) =
        if trend_components.is_some() || seasonal_components.is_some() {
            let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
            let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
            (t_idx, s_idx, 0.0, 0.0)
        } else {
            auto_group_ssa_components(&u, &sigma, l, k, n_comp)
        };

    // Reconstruct each group back into series space.
    let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
    let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);

    // Whatever the retained groups do not explain is noise.
    let noise: Vec<f64> = values
        .iter()
        .zip(trend.iter())
        .zip(seasonal.iter())
        .map(|((&y, &t), &s)| y - t - s)
        .collect();

    SsaResult {
        trend,
        seasonal,
        noise,
        singular_values: sigma.into_iter().take(n_comp).collect(),
        contributions,
        window_length: l,
        n_components: n_comp,
        detected_period,
        confidence,
    }
}
4143
/// Build the L x K trajectory (Hankel) matrix in column-major order:
/// column j holds `values[j..j + l]`.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    let mut trajectory = vec![0.0; l * k];
    for (j, col) in trajectory.chunks_mut(l).enumerate() {
        col.copy_from_slice(&values[j..j + l]);
    }
    trajectory
}
4157
4158fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
4160 use nalgebra::{DMatrix, SVD};
4161
4162 let mat = DMatrix::from_column_slice(l, k, trajectory);
4164
4165 let svd = SVD::new(mat, true, true);
4167
4168 let u_mat = svd.u.unwrap();
4170 let vt_mat = svd.v_t.unwrap();
4171 let sigma = svd.singular_values;
4172
4173 let u: Vec<f64> = u_mat.iter().cloned().collect();
4175 let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
4176 let vt: Vec<f64> = vt_mat.iter().cloned().collect();
4177
4178 (u, sigma_vec, vt)
4179}
4180
4181fn auto_group_ssa_components(
4183 u: &[f64],
4184 sigma: &[f64],
4185 l: usize,
4186 _k: usize,
4187 n_comp: usize,
4188) -> (Vec<usize>, Vec<usize>, f64, f64) {
4189 let mut trend_idx = Vec::new();
4190 let mut seasonal_idx = Vec::new();
4191 let mut detected_period = 0.0;
4192 let mut confidence = 0.0;
4193
4194 for i in 0..n_comp.min(sigma.len()) {
4196 let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4198
4199 let is_trend = is_trend_component(&u_col);
4201
4202 let (is_periodic, period) = is_periodic_component(&u_col);
4204
4205 if is_trend && trend_idx.len() < 2 {
4206 trend_idx.push(i);
4207 } else if is_periodic {
4208 seasonal_idx.push(i);
4209 if detected_period == 0.0 && period > 0.0 {
4210 detected_period = period;
4211 confidence = sigma[i] / sigma[0].max(1e-15);
4212 }
4213 }
4214 }
4215
4216 if trend_idx.is_empty() && n_comp > 0 {
4218 trend_idx.push(0);
4219 }
4220
4221 if seasonal_idx.is_empty() && n_comp >= 3 {
4223 seasonal_idx.push(1);
4224 if n_comp > 2 {
4225 seasonal_idx.push(2);
4226 }
4227 }
4228
4229 (trend_idx, seasonal_idx, detected_period, confidence)
4230}
4231
/// Heuristic trend test for a singular vector: a slowly varying component
/// crosses zero rarely (at most len/10 sign changes). Vectors shorter than
/// 3 samples are never classified as trend.
fn is_trend_component(u_col: &[f64]) -> bool {
    let n = u_col.len();
    if n < 3 {
        return false;
    }

    // Count zero crossings between consecutive samples.
    let sign_changes = u_col.windows(2).filter(|w| w[0] * w[1] < 0.0).count();

    sign_changes <= n / 10
}
4250
/// Heuristic periodicity test for a singular vector via a crude
/// autocorrelation scan over lags 2..len/2.
///
/// Returns (is_periodic, best lag as f64); a component counts as periodic
/// when some lag's energy-normalized autocorrelation exceeds 0.3.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    // Center the vector; a (near-)constant vector cannot be periodic.
    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();

    let var: f64 = centered.iter().map(|&x| x * x).sum();
    if var < 1e-15 {
        return (false, 0.0);
    }

    let mut best_period = 0.0;
    let mut best_acf = 0.0;

    for lag in 2..n / 2 {
        // Lagged product sum divided by total energy (biased estimator).
        let acf = centered[..n - lag]
            .iter()
            .zip(&centered[lag..])
            .map(|(a, b)| a * b)
            .sum::<f64>()
            / var;

        if acf > best_acf && acf > 0.3 {
            best_acf = acf;
            best_period = lag as f64;
        }
    }

    (best_acf > 0.3 && best_period > 0.0, best_period)
}
4287
4288fn reconstruct_grouped(
4290 u: &[f64],
4291 sigma: &[f64],
4292 vt: &[f64],
4293 l: usize,
4294 k: usize,
4295 n: usize,
4296 group_idx: &[usize],
4297) -> Vec<f64> {
4298 if group_idx.is_empty() {
4299 return vec![0.0; n];
4300 }
4301
4302 let mut grouped_matrix = vec![0.0; l * k];
4304
4305 for &idx in group_idx {
4306 if idx >= sigma.len() {
4307 continue;
4308 }
4309
4310 let s = sigma[idx];
4311
4312 for j in 0..k {
4314 for i in 0..l {
4315 let u_val = u[i + idx * l];
4316 let v_val = vt[idx + j * sigma.len().min(l)]; grouped_matrix[i + j * l] += s * u_val * v_val;
4318 }
4319 }
4320 }
4321
4322 diagonal_average(&grouped_matrix, l, k, n)
4324}
4325
/// Hankelization: average the anti-diagonals of an L x K column-major matrix
/// to recover a length-`n` series. Entry (i, j) contributes to output
/// position i + j; positions past `n` are discarded.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut result = vec![0.0; n];
    let mut counts = vec![0usize; n];

    for j in 0..k {
        for i in 0..l {
            let idx = i + j;
            if idx < n {
                result[idx] += matrix[i + j * l];
                counts[idx] += 1;
            }
        }
    }

    // Divide each position by the number of contributions it received.
    for (r, &c) in result.iter_mut().zip(counts.iter()) {
        if c > 0 {
            *r /= c as f64;
        }
    }

    result
}
4351
4352pub fn ssa_fdata(
4364 data: &[f64],
4365 n: usize,
4366 m: usize,
4367 window_length: Option<usize>,
4368 n_components: Option<usize>,
4369) -> SsaResult {
4370 let mean_curve = compute_mean_curve(data, n, m);
4372
4373 ssa(&mean_curve, window_length, n_components, None, None)
4375}
4376
4377pub fn ssa_seasonality(
4387 values: &[f64],
4388 window_length: Option<usize>,
4389 confidence_threshold: Option<f64>,
4390) -> (bool, f64, f64) {
4391 let result = ssa(values, window_length, None, None, None);
4392
4393 let threshold = confidence_threshold.unwrap_or(0.1);
4394 let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4395
4396 (is_seasonal, result.detected_period, result.confidence)
4397}
4398
4399#[cfg(test)]
4400mod tests {
4401 use super::*;
4402 use std::f64::consts::PI;
4403
4404 fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> Vec<f64> {
4405 let mut data = vec![0.0; n * m];
4406 for i in 0..n {
4407 for j in 0..m {
4408 data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4409 }
4410 }
4411 data
4412 }
4413
4414 #[test]
4415 fn test_period_estimation_fft() {
4416 let m = 200;
4417 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4418 let period = 2.0;
4419 let data = generate_sine(1, m, period, &argvals);
4420
4421 let estimate = estimate_period_fft(&data, 1, m, &argvals);
4422 assert!((estimate.period - period).abs() < 0.2);
4423 assert!(estimate.confidence > 1.0);
4424 }
4425
4426 #[test]
4427 fn test_peak_detection() {
4428 let m = 100;
4429 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4430 let period = 2.0;
4431 let data = generate_sine(1, m, period, &argvals);
4432
4433 let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
4434
4435 assert!(!result.peaks[0].is_empty());
4437 assert!((result.mean_period - period).abs() < 0.3);
4438 }
4439
4440 #[test]
4441 fn test_peak_detection_known_sine() {
4442 let m = 200; let period = 2.0;
4446 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4447 let data: Vec<f64> = argvals
4448 .iter()
4449 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4450 .collect();
4451
4452 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4453
4454 assert_eq!(
4456 result.peaks[0].len(),
4457 5,
4458 "Expected 5 peaks, got {}. Peak times: {:?}",
4459 result.peaks[0].len(),
4460 result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
4461 );
4462
4463 let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
4465 for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
4466 assert!(
4467 (peak.time - expected).abs() < 0.15,
4468 "Peak at {:.3} not close to expected {:.3}",
4469 peak.time,
4470 expected
4471 );
4472 }
4473
4474 assert!(
4476 (result.mean_period - period).abs() < 0.1,
4477 "Mean period {:.3} not close to expected {:.3}",
4478 result.mean_period,
4479 period
4480 );
4481 }
4482
4483 #[test]
4484 fn test_peak_detection_with_min_distance() {
4485 let m = 200;
4487 let period = 2.0;
4488 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4489 let data: Vec<f64> = argvals
4490 .iter()
4491 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4492 .collect();
4493
4494 let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
4496 assert_eq!(
4497 result.peaks[0].len(),
4498 5,
4499 "With min_distance=1.5, expected 5 peaks, got {}",
4500 result.peaks[0].len()
4501 );
4502
4503 let result2 = detect_peaks(&data, 1, m, &argvals, Some(2.5), None, false, None);
4505 assert!(
4506 result2.peaks[0].len() < 5,
4507 "With min_distance=2.5, expected fewer than 5 peaks, got {}",
4508 result2.peaks[0].len()
4509 );
4510 }
4511
4512 #[test]
4513 fn test_peak_detection_period_1() {
4514 let m = 400; let period = 1.0;
4518 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4519 let data: Vec<f64> = argvals
4520 .iter()
4521 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4522 .collect();
4523
4524 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4525
4526 assert_eq!(
4528 result.peaks[0].len(),
4529 10,
4530 "Expected 10 peaks, got {}",
4531 result.peaks[0].len()
4532 );
4533
4534 assert!(
4536 (result.mean_period - period).abs() < 0.1,
4537 "Mean period {:.3} not close to expected {:.3}",
4538 result.mean_period,
4539 period
4540 );
4541 }
4542
4543 #[test]
4544 fn test_peak_detection_shifted_sine() {
4545 let m = 200;
4548 let period = 2.0;
4549 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4550 let data: Vec<f64> = argvals
4551 .iter()
4552 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
4553 .collect();
4554
4555 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4556
4557 assert_eq!(
4559 result.peaks[0].len(),
4560 5,
4561 "Expected 5 peaks for shifted sine, got {}",
4562 result.peaks[0].len()
4563 );
4564
4565 for peak in &result.peaks[0] {
4567 assert!(
4568 (peak.value - 2.0).abs() < 0.05,
4569 "Peak value {:.3} not close to expected 2.0",
4570 peak.value
4571 );
4572 }
4573 }
4574
4575 #[test]
4576 fn test_peak_detection_prominence() {
4577 let m = 200;
4580 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4581 let data: Vec<f64> = argvals
4582 .iter()
4583 .map(|&t| {
4584 let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
4585 let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
4587 base + ripple
4588 })
4589 .collect();
4590
4591 let result_no_filter = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4593
4594 let result_filtered = detect_peaks(&data, 1, m, &argvals, None, Some(0.5), false, None);
4596
4597 assert!(
4599 result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
4600 "Prominence filter should reduce peak count"
4601 );
4602 }
4603
4604 #[test]
4605 fn test_peak_detection_different_amplitudes() {
4606 let m = 200;
4608 let period = 2.0;
4609 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4610
4611 for amplitude in [0.5, 1.0, 2.0, 5.0] {
4612 let data: Vec<f64> = argvals
4613 .iter()
4614 .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
4615 .collect();
4616
4617 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4618
4619 assert_eq!(
4620 result.peaks[0].len(),
4621 5,
4622 "Amplitude {} should still find 5 peaks",
4623 amplitude
4624 );
4625
4626 for peak in &result.peaks[0] {
4628 assert!(
4629 (peak.value - amplitude).abs() < 0.1,
4630 "Peak value {:.3} should be close to amplitude {}",
4631 peak.value,
4632 amplitude
4633 );
4634 }
4635 }
4636 }
4637
4638 #[test]
4639 fn test_peak_detection_varying_frequency() {
4640 let m = 400;
4643 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4644
4645 let data: Vec<f64> = argvals
4648 .iter()
4649 .map(|&t| {
4650 let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4651 phase.sin()
4652 })
4653 .collect();
4654
4655 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4656
4657 assert!(
4659 result.peaks[0].len() >= 5,
4660 "Should find at least 5 peaks, got {}",
4661 result.peaks[0].len()
4662 );
4663
4664 let distances = &result.inter_peak_distances[0];
4666 if distances.len() >= 3 {
4667 let early_avg = (distances[0] + distances[1]) / 2.0;
4669 let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4670 assert!(
4671 late_avg < early_avg,
4672 "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4673 early_avg,
4674 late_avg
4675 );
4676 }
4677 }
4678
4679 #[test]
4680 fn test_peak_detection_sum_of_sines() {
4681 let m = 300;
4684 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4685
4686 let data: Vec<f64> = argvals
4687 .iter()
4688 .map(|&t| {
4689 (2.0 * std::f64::consts::PI * t / 2.0).sin()
4690 + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4691 })
4692 .collect();
4693
4694 let result = detect_peaks(&data, 1, m, &argvals, Some(1.0), None, false, None);
4695
4696 assert!(
4698 result.peaks[0].len() >= 4,
4699 "Should find at least 4 peaks, got {}",
4700 result.peaks[0].len()
4701 );
4702
4703 let distances = &result.inter_peak_distances[0];
4705 if distances.len() >= 2 {
4706 let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4707 let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4708 assert!(
4709 max_dist > min_dist * 1.1,
4710 "Distances should vary: min={:.3}, max={:.3}",
4711 min_dist,
4712 max_dist
4713 );
4714 }
4715 }
4716
4717 #[test]
4718 fn test_seasonal_strength() {
4719 let m = 200;
4720 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4721 let period = 2.0;
4722 let data = generate_sine(1, m, period, &argvals);
4723
4724 let strength = seasonal_strength_variance(&data, 1, m, &argvals, period, 3);
4725 assert!(strength > 0.8);
4727
4728 let strength_spectral = seasonal_strength_spectral(&data, 1, m, &argvals, period);
4729 assert!(strength_spectral > 0.5);
4730 }
4731
4732 #[test]
4733 fn test_instantaneous_period() {
4734 let m = 200;
4735 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4736 let period = 2.0;
4737 let data = generate_sine(1, m, period, &argvals);
4738
4739 let result = instantaneous_period(&data, 1, m, &argvals);
4740
4741 let mid_period = result.period[m / 2];
4743 assert!(
4744 (mid_period - period).abs() < 0.5,
4745 "Expected period ~{}, got {}",
4746 period,
4747 mid_period
4748 );
4749 }
4750
4751 #[test]
4752 fn test_peak_timing_analysis() {
4753 let m = 500;
4755 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4756 let period = 2.0;
4757 let data = generate_sine(1, m, period, &argvals);
4758
4759 let result = analyze_peak_timing(&data, 1, m, &argvals, period, Some(11));
4760
4761 assert!(!result.peak_times.is_empty());
4763 assert!(result.mean_timing.is_finite());
4765 assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4767 }
4768
4769 #[test]
4770 fn test_seasonality_classification() {
4771 let m = 400;
4772 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4773 let period = 2.0;
4774 let data = generate_sine(1, m, period, &argvals);
4775
4776 let result = classify_seasonality(&data, 1, m, &argvals, period, None, None);
4777
4778 assert!(result.is_seasonal);
4779 assert!(result.seasonal_strength > 0.5);
4780 assert!(matches!(
4781 result.classification,
4782 SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4783 ));
4784 }
4785
4786 #[test]
4787 fn test_otsu_threshold() {
4788 let values = vec![
4790 0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4791 ];
4792
4793 let threshold = otsu_threshold(&values);
4794
4795 assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4799 assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4800 }
4801
4802 #[test]
4803 fn test_gcv_fourier_nbasis_selection() {
4804 let m = 100;
4805 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4806
4807 let mut data = vec![0.0; m];
4809 for j in 0..m {
4810 data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4811 }
4812
4813 let nbasis = crate::basis::select_fourier_nbasis_gcv(&data, 1, m, &argvals, 5, 25);
4814
4815 assert!(nbasis >= 5);
4817 assert!(nbasis <= 25);
4818 }
4819
4820 #[test]
4821 fn test_detect_multiple_periods() {
4822 let m = 400;
4823 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); let period1 = 2.0;
4827 let period2 = 7.0;
4828 let mut data = vec![0.0; m];
4829 for j in 0..m {
4830 data[j] = (2.0 * PI * argvals[j] / period1).sin()
4831 + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
4832 }
4833
4834 let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);
4836
4837 assert!(
4839 detected.len() >= 2,
4840 "Expected at least 2 periods, found {}",
4841 detected.len()
4842 );
4843
4844 let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
4846 let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
4847 let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);
4848
4849 assert!(
4850 has_period1,
4851 "Expected to find period ~{}, got {:?}",
4852 period1, periods
4853 );
4854 assert!(
4855 has_period2,
4856 "Expected to find period ~{}, got {:?}",
4857 period2, periods
4858 );
4859
4860 assert!(
4862 detected[0].amplitude > detected[1].amplitude,
4863 "First detected should have higher amplitude"
4864 );
4865
4866 for d in &detected {
4868 assert!(
4869 d.strength > 0.0,
4870 "Detected period should have positive strength"
4871 );
4872 assert!(
4873 d.confidence > 0.0,
4874 "Detected period should have positive confidence"
4875 );
4876 assert!(
4877 d.amplitude > 0.0,
4878 "Detected period should have positive amplitude"
4879 );
4880 }
4881 }
4882
4883 #[test]
4888 fn test_amplitude_modulation_stable() {
4889 let m = 200;
4891 let period = 0.2;
4892 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4893
4894 let data: Vec<f64> = argvals
4896 .iter()
4897 .map(|&t| (2.0 * PI * t / period).sin())
4898 .collect();
4899
4900 let result = detect_amplitude_modulation(
4901 &data, 1, m, &argvals, period, 0.15, 0.3, );
4904
4905 eprintln!(
4906 "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4907 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4908 );
4909
4910 assert!(result.is_seasonal, "Should detect seasonality");
4911 assert!(
4912 !result.has_modulation,
4913 "Constant amplitude should not have modulation, got score={:.4}",
4914 result.modulation_score
4915 );
4916 assert_eq!(
4917 result.modulation_type,
4918 ModulationType::Stable,
4919 "Should be classified as Stable"
4920 );
4921 }
4922
4923 #[test]
4924 fn test_amplitude_modulation_emerging() {
4925 let m = 200;
4927 let period = 0.2;
4928 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4929
4930 let data: Vec<f64> = argvals
4932 .iter()
4933 .map(|&t| {
4934 let amplitude = 0.2 + 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
4936 })
4937 .collect();
4938
4939 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
4940
4941 eprintln!(
4942 "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4943 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4944 );
4945
4946 assert!(result.is_seasonal, "Should detect seasonality");
4947 assert!(
4948 result.has_modulation,
4949 "Growing amplitude should have modulation, score={:.4}",
4950 result.modulation_score
4951 );
4952 assert_eq!(
4953 result.modulation_type,
4954 ModulationType::Emerging,
4955 "Should be classified as Emerging, trend={:.4}",
4956 result.amplitude_trend
4957 );
4958 assert!(
4959 result.amplitude_trend > 0.0,
4960 "Trend should be positive for emerging"
4961 );
4962 }
4963
4964 #[test]
4965 fn test_amplitude_modulation_fading() {
4966 let m = 200;
4968 let period = 0.2;
4969 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4970
4971 let data: Vec<f64> = argvals
4973 .iter()
4974 .map(|&t| {
4975 let amplitude = 1.0 - 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
4977 })
4978 .collect();
4979
4980 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
4981
4982 eprintln!(
4983 "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4984 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4985 );
4986
4987 assert!(result.is_seasonal, "Should detect seasonality");
4988 assert!(
4989 result.has_modulation,
4990 "Fading amplitude should have modulation"
4991 );
4992 assert_eq!(
4993 result.modulation_type,
4994 ModulationType::Fading,
4995 "Should be classified as Fading, trend={:.4}",
4996 result.amplitude_trend
4997 );
4998 assert!(
4999 result.amplitude_trend < 0.0,
5000 "Trend should be negative for fading"
5001 );
5002 }
5003
5004 #[test]
5005 fn test_amplitude_modulation_oscillating() {
5006 let m = 200;
5008 let period = 0.1;
5009 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5010
5011 let data: Vec<f64> = argvals
5013 .iter()
5014 .map(|&t| {
5015 let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); amplitude * (2.0 * PI * t / period).sin()
5017 })
5018 .collect();
5019
5020 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5021
5022 eprintln!(
5023 "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5024 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5025 );
5026
5027 assert!(result.is_seasonal, "Should detect seasonality");
5028 if result.has_modulation {
5030 assert!(
5032 result.amplitude_trend.abs() < 0.5,
5033 "Trend should be small for oscillating"
5034 );
5035 }
5036 }
5037
5038 #[test]
5039 fn test_amplitude_modulation_non_seasonal() {
5040 let m = 100;
5042 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5043
5044 let data: Vec<f64> = (0..m)
5046 .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5047 .collect();
5048
5049 let result = detect_amplitude_modulation(
5050 &data, 1, m, &argvals, 0.2, 0.15, 0.3,
5052 );
5053
5054 assert!(
5055 !result.is_seasonal,
5056 "Noise should not be detected as seasonal"
5057 );
5058 assert_eq!(
5059 result.modulation_type,
5060 ModulationType::NonSeasonal,
5061 "Should be classified as NonSeasonal"
5062 );
5063 }
5064
5065 #[test]
5070 fn test_wavelet_amplitude_modulation_stable() {
5071 let m = 200;
5072 let period = 0.2;
5073 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5074
5075 let data: Vec<f64> = argvals
5076 .iter()
5077 .map(|&t| (2.0 * PI * t / period).sin())
5078 .collect();
5079
5080 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.3);
5081
5082 eprintln!(
5083 "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5084 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5085 );
5086
5087 assert!(result.is_seasonal, "Should detect seasonality");
5088 assert!(
5089 !result.has_modulation,
5090 "Constant amplitude should not have modulation, got score={:.4}",
5091 result.modulation_score
5092 );
5093 }
5094
5095 #[test]
5096 fn test_wavelet_amplitude_modulation_emerging() {
5097 let m = 200;
5098 let period = 0.2;
5099 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5100
5101 let data: Vec<f64> = argvals
5103 .iter()
5104 .map(|&t| {
5105 let amplitude = 0.2 + 0.8 * t;
5106 amplitude * (2.0 * PI * t / period).sin()
5107 })
5108 .collect();
5109
5110 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5111
5112 eprintln!(
5113 "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5114 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5115 );
5116
5117 assert!(result.is_seasonal, "Should detect seasonality");
5118 assert!(
5119 result.has_modulation,
5120 "Growing amplitude should have modulation"
5121 );
5122 assert!(
5123 result.amplitude_trend > 0.0,
5124 "Trend should be positive for emerging"
5125 );
5126 }
5127
5128 #[test]
5129 fn test_wavelet_amplitude_modulation_fading() {
5130 let m = 200;
5131 let period = 0.2;
5132 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5133
5134 let data: Vec<f64> = argvals
5136 .iter()
5137 .map(|&t| {
5138 let amplitude = 1.0 - 0.8 * t;
5139 amplitude * (2.0 * PI * t / period).sin()
5140 })
5141 .collect();
5142
5143 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5144
5145 eprintln!(
5146 "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5147 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5148 );
5149
5150 assert!(result.is_seasonal, "Should detect seasonality");
5151 assert!(
5152 result.has_modulation,
5153 "Fading amplitude should have modulation"
5154 );
5155 assert!(
5156 result.amplitude_trend < 0.0,
5157 "Trend should be negative for fading"
5158 );
5159 }
5160
5161 #[test]
5162 fn test_seasonal_strength_wavelet() {
5163 let m = 200;
5164 let period = 0.2;
5165 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5166
5167 let seasonal_data: Vec<f64> = argvals
5169 .iter()
5170 .map(|&t| (2.0 * PI * t / period).sin())
5171 .collect();
5172
5173 let strength = seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period);
5174 eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5175 assert!(
5176 strength > 0.5,
5177 "Pure sine should have high wavelet strength"
5178 );
5179
5180 let noise_data: Vec<f64> = (0..m)
5182 .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5183 .collect();
5184
5185 let noise_strength = seasonal_strength_wavelet(&noise_data, 1, m, &argvals, period);
5186 eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5187 assert!(
5188 noise_strength < 0.3,
5189 "Noise should have low wavelet strength"
5190 );
5191
5192 let wrong_period_strength =
5194 seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period * 2.0);
5195 eprintln!(
5196 "Wavelet strength (wrong period): {:.4}",
5197 wrong_period_strength
5198 );
5199 assert!(
5200 wrong_period_strength < strength,
5201 "Wrong period should have lower strength"
5202 );
5203 }
5204
5205 #[test]
5206 fn test_compute_mean_curve() {
5207 let data = vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0]; let mean = compute_mean_curve(&data, 2, 3);
5213 assert_eq!(mean.len(), 3);
5214 assert!((mean[0] - 1.5).abs() < 1e-10);
5215 assert!((mean[1] - 3.0).abs() < 1e-10);
5216 assert!((mean[2] - 4.5).abs() < 1e-10);
5217 }
5218
5219 #[test]
5220 fn test_compute_mean_curve_parallel_consistency() {
5221 let n = 10;
5223 let m = 200;
5224 let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5225
5226 let seq_result = compute_mean_curve_impl(&data, n, m, false);
5227 let par_result = compute_mean_curve_impl(&data, n, m, true);
5228
5229 assert_eq!(seq_result.len(), par_result.len());
5230 for (s, p) in seq_result.iter().zip(par_result.iter()) {
5231 assert!(
5232 (s - p).abs() < 1e-10,
5233 "Sequential and parallel results differ"
5234 );
5235 }
5236 }
5237
5238 #[test]
5239 fn test_interior_bounds() {
5240 let bounds = interior_bounds(100);
5242 assert!(bounds.is_some());
5243 let (start, end) = bounds.unwrap();
5244 assert_eq!(start, 10);
5245 assert_eq!(end, 90);
5246
5247 let bounds = interior_bounds(10);
5249 assert!(bounds.is_some());
5250 let (start, end) = bounds.unwrap();
5251 assert!(start < end);
5252
5253 let bounds = interior_bounds(2);
5255 assert!(bounds.is_some() || bounds.is_none());
5257 }
5258
5259 #[test]
5260 fn test_hilbert_transform_pure_sine() {
5261 let m = 200;
5263 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5264 let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();
5265
5266 let analytic = hilbert_transform(&signal);
5267 assert_eq!(analytic.len(), m);
5268
5269 for c in analytic.iter().skip(10).take(m - 20) {
5271 let amp = c.norm();
5272 assert!(
5273 (amp - 1.0).abs() < 0.1,
5274 "Amplitude should be ~1, got {}",
5275 amp
5276 );
5277 }
5278 }
5279
5280 #[test]
5281 fn test_sazed_pure_sine() {
5282 let m = 200;
5284 let period = 2.0;
5285 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5286 let data: Vec<f64> = argvals
5287 .iter()
5288 .map(|&t| (2.0 * PI * t / period).sin())
5289 .collect();
5290
5291 let result = sazed(&data, &argvals, None);
5292
5293 assert!(result.period.is_finite(), "SAZED should detect a period");
5294 assert!(
5295 (result.period - period).abs() < 0.3,
5296 "Expected period ~{}, got {}",
5297 period,
5298 result.period
5299 );
5300 assert!(
5301 result.confidence > 0.4,
5302 "Expected confidence > 0.4, got {}",
5303 result.confidence
5304 );
5305 assert!(
5306 result.agreeing_components >= 2,
5307 "Expected at least 2 agreeing components, got {}",
5308 result.agreeing_components
5309 );
5310 }
5311
5312 #[test]
5313 fn test_sazed_noisy_sine() {
5314 let m = 300;
5316 let period = 3.0;
5317 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5318
5319 let data: Vec<f64> = argvals
5321 .iter()
5322 .enumerate()
5323 .map(|(i, &t)| {
5324 let signal = (2.0 * PI * t / period).sin();
5325 let noise = 0.1 * (17.3 * i as f64).sin();
5326 signal + noise
5327 })
5328 .collect();
5329
5330 let result = sazed(&data, &argvals, Some(0.15));
5331
5332 assert!(
5333 result.period.is_finite(),
5334 "SAZED should detect a period even with noise"
5335 );
5336 assert!(
5337 (result.period - period).abs() < 0.5,
5338 "Expected period ~{}, got {}",
5339 period,
5340 result.period
5341 );
5342 }
5343
5344 #[test]
5345 fn test_sazed_fdata() {
5346 let n = 5;
5348 let m = 200;
5349 let period = 2.0;
5350 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5351 let data = generate_sine(n, m, period, &argvals);
5352
5353 let result = sazed_fdata(&data, n, m, &argvals, None);
5354
5355 assert!(result.period.is_finite(), "SAZED should detect a period");
5356 assert!(
5357 (result.period - period).abs() < 0.3,
5358 "Expected period ~{}, got {}",
5359 period,
5360 result.period
5361 );
5362 }
5363
5364 #[test]
5365 fn test_sazed_short_series() {
5366 let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5368 let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5369
5370 let result = sazed(&data, &argvals, None);
5371
5372 assert!(
5374 result.period.is_nan() || result.period.is_finite(),
5375 "Should return NaN or valid period"
5376 );
5377 }
5378
5379 #[test]
5380 fn test_autoperiod_pure_sine() {
5381 let m = 200;
5383 let period = 2.0;
5384 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5385 let data: Vec<f64> = argvals
5386 .iter()
5387 .map(|&t| (2.0 * PI * t / period).sin())
5388 .collect();
5389
5390 let result = autoperiod(&data, &argvals, None, None);
5391
5392 assert!(
5393 result.period.is_finite(),
5394 "Autoperiod should detect a period"
5395 );
5396 assert!(
5397 (result.period - period).abs() < 0.3,
5398 "Expected period ~{}, got {}",
5399 period,
5400 result.period
5401 );
5402 assert!(
5403 result.confidence > 0.0,
5404 "Expected positive confidence, got {}",
5405 result.confidence
5406 );
5407 }
5408
5409 #[test]
5410 fn test_autoperiod_with_trend() {
5411 let m = 300;
5413 let period = 3.0;
5414 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5415 let data: Vec<f64> = argvals
5416 .iter()
5417 .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5418 .collect();
5419
5420 let result = autoperiod(&data, &argvals, None, None);
5421
5422 assert!(
5423 result.period.is_finite(),
5424 "Autoperiod should detect a period"
5425 );
5426 assert!(
5428 (result.period - period).abs() < 0.5,
5429 "Expected period ~{}, got {}",
5430 period,
5431 result.period
5432 );
5433 }
5434
5435 #[test]
5436 fn test_autoperiod_candidates() {
5437 let m = 200;
5439 let period = 2.0;
5440 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5441 let data: Vec<f64> = argvals
5442 .iter()
5443 .map(|&t| (2.0 * PI * t / period).sin())
5444 .collect();
5445
5446 let result = autoperiod(&data, &argvals, Some(5), Some(10));
5447
5448 assert!(
5449 !result.candidates.is_empty(),
5450 "Should have at least one candidate"
5451 );
5452
5453 let max_score = result
5455 .candidates
5456 .iter()
5457 .map(|c| c.combined_score)
5458 .fold(f64::NEG_INFINITY, f64::max);
5459 assert!(
5460 (result.confidence - max_score).abs() < 1e-10,
5461 "Returned confidence should match best candidate's score"
5462 );
5463 }
5464
5465 #[test]
5466 fn test_autoperiod_fdata() {
5467 let n = 5;
5469 let m = 200;
5470 let period = 2.0;
5471 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5472 let data = generate_sine(n, m, period, &argvals);
5473
5474 let result = autoperiod_fdata(&data, n, m, &argvals, None, None);
5475
5476 assert!(
5477 result.period.is_finite(),
5478 "Autoperiod should detect a period"
5479 );
5480 assert!(
5481 (result.period - period).abs() < 0.3,
5482 "Expected period ~{}, got {}",
5483 period,
5484 result.period
5485 );
5486 }
5487
5488 #[test]
5489 fn test_cfd_autoperiod_pure_sine() {
5490 let m = 200;
5492 let period = 2.0;
5493 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5494 let data: Vec<f64> = argvals
5495 .iter()
5496 .map(|&t| (2.0 * PI * t / period).sin())
5497 .collect();
5498
5499 let result = cfd_autoperiod(&data, &argvals, None, None);
5500
5501 assert!(
5502 result.period.is_finite(),
5503 "CFDAutoperiod should detect a period"
5504 );
5505 assert!(
5506 (result.period - period).abs() < 0.3,
5507 "Expected period ~{}, got {}",
5508 period,
5509 result.period
5510 );
5511 }
5512
5513 #[test]
5514 fn test_cfd_autoperiod_with_trend() {
5515 let m = 300;
5517 let period = 3.0;
5518 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5519 let data: Vec<f64> = argvals
5520 .iter()
5521 .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5522 .collect();
5523
5524 let result = cfd_autoperiod(&data, &argvals, None, None);
5525
5526 assert!(
5527 result.period.is_finite(),
5528 "CFDAutoperiod should detect a period despite trend"
5529 );
5530 assert!(
5532 (result.period - period).abs() < 0.6,
5533 "Expected period ~{}, got {}",
5534 period,
5535 result.period
5536 );
5537 }
5538
5539 #[test]
5540 fn test_cfd_autoperiod_multiple_periods() {
5541 let m = 400;
5543 let period1 = 2.0;
5544 let period2 = 5.0;
5545 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5546 let data: Vec<f64> = argvals
5547 .iter()
5548 .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5549 .collect();
5550
5551 let result = cfd_autoperiod(&data, &argvals, None, None);
5552
5553 assert!(
5554 !result.periods.is_empty(),
5555 "Should detect at least one period"
5556 );
5557 let close_to_p1 = (result.period - period1).abs() < 0.5;
5559 let close_to_p2 = (result.period - period2).abs() < 1.0;
5560 assert!(
5561 close_to_p1 || close_to_p2,
5562 "Primary period {} not close to {} or {}",
5563 result.period,
5564 period1,
5565 period2
5566 );
5567 }
5568
5569 #[test]
5570 fn test_cfd_autoperiod_fdata() {
5571 let n = 5;
5573 let m = 200;
5574 let period = 2.0;
5575 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5576 let data = generate_sine(n, m, period, &argvals);
5577
5578 let result = cfd_autoperiod_fdata(&data, n, m, &argvals, None, None);
5579
5580 assert!(
5581 result.period.is_finite(),
5582 "CFDAutoperiod should detect a period"
5583 );
5584 assert!(
5585 (result.period - period).abs() < 0.3,
5586 "Expected period ~{}, got {}",
5587 period,
5588 result.period
5589 );
5590 }
5591}