1use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::matrix::FdMatrix;
15use crate::{iter_maybe_parallel, slice_maybe_parallel};
16use num_complex::Complex;
17#[cfg(feature = "parallel")]
18use rayon::iter::ParallelIterator;
19use rustfft::FftPlanner;
20use std::f64::consts::PI;
21
/// Outcome of a period-estimation routine (FFT, ACF, or regression based).
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period in the units of `argvals` (NaN on failure).
    pub period: f64,
    /// Corresponding frequency, `1 / period`.
    pub frequency: f64,
    /// Power (or ACF value) of the component backing the estimate.
    pub power: f64,
    /// Heuristic confidence; larger means a more pronounced peak.
    pub confidence: f64,
}

/// A single detected peak in one curve.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Location of the peak on the `argvals` axis.
    pub time: f64,
    /// Curve value at the peak.
    pub value: f64,
    /// Topographic prominence, normalised by the overall data range.
    pub prominence: f64,
}

/// Result of `detect_peaks`, one entry per curve.
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Detected peaks for each curve.
    pub peaks: Vec<Vec<Peak>>,
    /// Gaps between consecutive peak times, per curve.
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean of all inter-peak gaps pooled across curves (NaN when none exist).
    pub mean_period: f64,
}
56
/// One periodic component found by `detect_multiple_periods`.
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Period of the component.
    pub period: f64,
    /// FFT-based confidence (peak-to-mean power ratio).
    pub confidence: f64,
    /// Variance-based seasonal strength of the component.
    pub strength: f64,
    /// Amplitude of the fitted sinusoid.
    pub amplitude: f64,
    /// Phase of the fitted sinusoid, in radians.
    pub phase: f64,
    /// 1-based extraction round in which this component was found.
    pub iteration: usize,
}

/// How seasonal strength is computed.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Ratio of seasonal-fit variance to total variance.
    Variance,
    /// Fraction of spectral power at harmonics of the period.
    Spectral,
}
82
/// A detected transition in seasonal behaviour.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time at which the change occurs.
    pub time: f64,
    /// Whether seasonality starts or stops here.
    pub change_type: ChangeType,
    /// Seasonal strength just before the change.
    pub strength_before: f64,
    /// Seasonal strength at/after the change.
    pub strength_after: f64,
}

/// Direction of a seasonal change point.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Seasonality appears (strength rises above the threshold).
    Onset,
    /// Seasonality disappears (strength falls below the threshold).
    Cessation,
}

/// Output of change-point detection over a seasonal-strength curve.
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Accepted change points, in time order.
    pub change_points: Vec<ChangePoint>,
    /// Seasonal strength at every grid point.
    pub strength_curve: Vec<f64>,
}
113
/// Time-varying period estimate (presumably from the analytic signal —
/// see `hilbert_transform`; confirm against the producing routine).
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each grid point.
    pub period: Vec<f64>,
    /// Instantaneous frequency at each grid point.
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (envelope) at each grid point.
    pub amplitude: Vec<f64>,
}

/// Timing statistics of per-cycle peaks.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Absolute time of each detected peak.
    pub peak_times: Vec<f64>,
    /// Curve value at each peak.
    pub peak_values: Vec<f64>,
    /// Peak position within its cycle, normalised — TODO confirm exact range.
    pub normalized_timing: Vec<f64>,
    /// Mean of the normalised timings.
    pub mean_timing: f64,
    /// Standard deviation of the normalised timings.
    pub std_timing: f64,
    /// Range (max - min) of the normalised timings.
    pub range_timing: f64,
    /// Aggregate timing-variability score.
    pub variability_score: f64,
    /// Linear trend of peak timing across cycles.
    pub timing_trend: f64,
    /// Cycle index associated with each peak.
    pub cycle_indices: Vec<usize>,
}

/// Coarse classification of a series' seasonal behaviour.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Seasonal with consistent peak timing.
    StableSeasonal,
    /// Seasonal but with shifting peak timing.
    VariableTiming,
    /// Seasonal only in parts of the record.
    IntermittentSeasonal,
    /// No meaningful seasonality.
    NonSeasonal,
}
160
/// Full seasonality-classification result.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Overall verdict: is the series seasonal at all?
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles.
    pub has_stable_timing: bool,
    /// Variability of peak timing across cycles.
    pub timing_variability: f64,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Seasonal strength of each individual cycle (NaN for short cycles).
    pub cycle_strengths: Vec<f64>,
    /// Indices of cycles whose strength fell below the threshold.
    pub weak_seasons: Vec<usize>,
    /// Coarse classification label.
    pub classification: SeasonalType,
    /// Peak-timing details, when they could be computed.
    pub peak_timing: Option<PeakTimingResult>,
}

/// How a numeric threshold is chosen.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Use the given value directly.
    Fixed(f64),
    /// Use the given percentile of the observed values.
    Percentile(f64),
    /// Derive the threshold with Otsu's method (see `otsu_threshold`).
    Otsu,
}

/// Temporal pattern of seasonal-amplitude modulation
/// (classified in `analyze_amplitude_envelope`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Roughly constant amplitude (modulation score below threshold).
    Stable,
    /// Amplitude growing over time (strong positive trend).
    Emerging,
    /// Amplitude shrinking over time (strong negative trend).
    Fading,
    /// Amplitude varying without a clear trend.
    Oscillating,
    /// No seasonality to modulate.
    NonSeasonal,
}
207
/// Result of amplitude-modulation analysis based on a seasonal-strength curve.
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether the series is judged seasonal.
    pub is_seasonal: bool,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Whether the seasonal amplitude varies materially over time.
    pub has_modulation: bool,
    /// Kind of modulation (stable / emerging / fading / oscillating).
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the amplitude envelope.
    pub modulation_score: f64,
    /// Normalised linear trend of the amplitude, clamped to [-1, 1].
    pub amplitude_trend: f64,
    /// Time-resolved seasonal strength.
    pub strength_curve: Vec<f64>,
    /// Time axis matching `strength_curve`.
    pub time_points: Vec<f64>,
    /// Minimum of the strength curve — presumably over the interior window;
    /// confirm against the producing routine.
    pub min_strength: f64,
    /// Maximum of the strength curve (same caveat as `min_strength`).
    pub max_strength: f64,
}

/// Result of wavelet-based amplitude-modulation analysis.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether the series is judged seasonal.
    pub is_seasonal: bool,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Whether the seasonal amplitude varies materially over time.
    pub has_modulation: bool,
    /// Kind of modulation (stable / emerging / fading / oscillating).
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the wavelet amplitude.
    pub modulation_score: f64,
    /// Normalised linear trend of the amplitude, clamped to [-1, 1].
    pub amplitude_trend: f64,
    /// Wavelet amplitude at each time point.
    pub wavelet_amplitude: Vec<f64>,
    /// Time axis matching `wavelet_amplitude`.
    pub time_points: Vec<f64>,
    /// Morlet scale used for the analysis.
    pub scale: f64,
}
255
256#[inline]
269fn compute_mean_curve_impl(data: &FdMatrix, parallel: bool) -> Vec<f64> {
270 let (n, m) = data.shape();
271 if parallel && m >= 100 {
272 iter_maybe_parallel!(0..m)
274 .map(|j| {
275 let mut sum = 0.0;
276 for i in 0..n {
277 sum += data[(i, j)];
278 }
279 sum / n as f64
280 })
281 .collect()
282 } else {
283 (0..m)
285 .map(|j| {
286 let mut sum = 0.0;
287 for i in 0..n {
288 sum += data[(i, j)];
289 }
290 sum / n as f64
291 })
292 .collect()
293 }
294}
295
/// Mean curve across all observations, parallelised when beneficial.
#[inline]
fn compute_mean_curve(data: &FdMatrix) -> Vec<f64> {
    compute_mean_curve_impl(data, true)
}
301
/// Bounds `(start, end)` of the "interior" index range of a length-`m` grid,
/// trimming roughly 10% from each edge (the trim is capped at a quarter of
/// the grid on each side). Returns `None` when no non-empty interior remains.
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let edge_skip = (m as f64 * 0.1) as usize;
    let start = edge_skip.min(m / 4);
    let end = m.saturating_sub(edge_skip).max(m * 3 / 4);
    (end > start).then_some((start, end))
}
323
324fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
326 interior_bounds(m).filter(|&(s, e)| e > s + min_span)
327}
328
329fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
332 let n = data.len();
333 if n < 2 || argvals.len() != n {
334 return (Vec::new(), Vec::new());
335 }
336
337 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
338 let fs = 1.0 / dt; let mut planner = FftPlanner::<f64>::new();
341 let fft = planner.plan_fft_forward(n);
342
343 let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
344 fft.process(&mut buffer);
345
346 let n_freq = n / 2 + 1;
348 let mut power = Vec::with_capacity(n_freq);
349 let mut frequencies = Vec::with_capacity(n_freq);
350
351 for k in 0..n_freq {
352 let freq = k as f64 * fs / n as f64;
353 frequencies.push(freq);
354
355 let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
356 let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
358 power.push(p);
359 }
360
361 (frequencies, power)
362}
363
/// Sample autocorrelation of `data` for lags `0..=max_lag` (clamped to n-1).
///
/// Values are normalised by the lag-0 variance, so `acf[0] == 1.0`.
/// A (near-)constant series returns all ones; an empty series returns
/// an empty vector.
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    let mean = data.iter().sum::<f64>() / n as f64;
    let var = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    // Degenerate (constant) series: perfectly correlated at every lag.
    if var < 1e-15 {
        return vec![1.0; max_lag.min(n) + 1];
    }

    let max_lag = max_lag.min(n - 1);
    (0..=max_lag)
        .map(|lag| {
            let prod: f64 = data[..n - lag]
                .iter()
                .zip(&data[lag..])
                .map(|(&a, &b)| (a - mean) * (b - mean))
                .sum();
            prod / (n as f64 * var)
        })
        .collect()
}
391
/// Push `candidate` as a peak index, enforcing a minimum index distance to
/// the previously accepted peak. When two candidates are closer than
/// `min_distance`, the taller one (by `signal` value) wins.
///
/// Fix: the original wrote through `peaks.last_mut().unwrap_or(&mut 0)`,
/// which in the (unreachable) `None` case would silently assign into a
/// throwaway temporary; the `match` makes the intent explicit and removes
/// the dead-write hazard.
fn try_add_peak(peaks: &mut Vec<usize>, candidate: usize, signal: &[f64], min_distance: usize) {
    match peaks.last_mut() {
        Some(last) if candidate - *last < min_distance => {
            // Too close to the previous peak: keep whichever is taller.
            if signal[candidate] > signal[*last] {
                *last = candidate;
            }
        }
        _ => peaks.push(candidate),
    }
}
405
406fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
408 let n = signal.len();
409 if n < 3 {
410 return Vec::new();
411 }
412
413 let mut peaks = Vec::new();
414
415 for i in 1..(n - 1) {
416 if signal[i] > signal[i - 1] && signal[i] > signal[i + 1] {
417 try_add_peak(&mut peaks, i, signal, min_distance);
418 }
419 }
420
421 peaks
422}
423
/// Topographic prominence of the peak at `peak_idx`: its height above the
/// higher of the two minima separating it from equal-or-taller terrain on
/// either side (or from the signal edge when nothing taller exists there).
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Walk left until a value >= the peak stops the scan, tracking the
    // lowest point seen along the way.
    let mut left_min = peak_val;
    for &v in signal[..peak_idx].iter().rev() {
        if v >= peak_val {
            break;
        }
        left_min = left_min.min(v);
    }

    // Same scan to the right.
    let mut right_min = peak_val;
    for &v in &signal[peak_idx + 1..] {
        if v >= peak_val {
            break;
        }
        right_min = right_min.min(v);
    }

    peak_val - left_min.max(right_min)
}
448
449pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
457 let n = signal.len();
458 if n == 0 {
459 return Vec::new();
460 }
461
462 let mut planner = FftPlanner::<f64>::new();
463 let fft_forward = planner.plan_fft_forward(n);
464 let fft_inverse = planner.plan_fft_inverse(n);
465
466 let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
468 fft_forward.process(&mut buffer);
469
470 let half = n / 2;
473 for k in 1..half {
474 buffer[k] *= 2.0;
475 }
476 for k in (half + 1)..n {
477 buffer[k] = Complex::new(0.0, 0.0);
478 }
479
480 fft_inverse.process(&mut buffer);
482
483 for c in buffer.iter_mut() {
485 *c /= n as f64;
486 }
487
488 buffer
489}
490
/// Unwrap a phase sequence by removing 2π jumps between consecutive samples,
/// producing a continuous phase curve.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    let Some((&first, rest)) = phase.split_first() else {
        return Vec::new();
    };

    let mut out = Vec::with_capacity(phase.len());
    out.push(first);

    // Running multiple-of-2π offset applied to all subsequent samples.
    let mut offset = 0.0;
    let mut prev = first;

    for &p in rest {
        let step = p - prev;
        if step > PI {
            offset -= 2.0 * PI;
        } else if step < -PI {
            offset += 2.0 * PI;
        }
        out.push(p + offset);
        prev = p;
    }

    out
}
515
516fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
523 let gaussian = (-t * t / 2.0).exp();
524 let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
525 oscillation * gaussian
526}
527
528#[allow(dead_code)]
542fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
543 let n = signal.len();
544 if n == 0 || scale <= 0.0 {
545 return Vec::new();
546 }
547
548 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
549
550 let norm = 1.0 / scale.sqrt();
553
554 (0..n)
555 .map(|b| {
556 let mut sum = Complex::new(0.0, 0.0);
557 for k in 0..n {
558 let t_normalized = (argvals[k] - argvals[b]) / scale;
559 if t_normalized.abs() < 6.0 {
561 let wavelet = morlet_wavelet(t_normalized, omega0);
562 sum += signal[k] * wavelet.conj();
563 }
564 }
565 sum * norm * dt
566 })
567 .collect()
568}
569
/// Continuous wavelet transform with a Morlet wavelet via FFT convolution.
///
/// O(n log n) counterpart of `cwt_morlet`, up to boundary behaviour: the FFT
/// implements *circular* convolution, so coefficients near the edges wrap
/// around and should be discarded by the caller (see `interior_bounds`).
fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    // Uniform sampling step and per-scale normalisation.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let norm = 1.0 / scale.sqrt();

    // Sample the scaled wavelet in FFT order: non-negative times in the first
    // half of the buffer, negative times wrapped into the second half.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();

    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);

    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);

    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);

    // Multiplying by the conjugate spectrum implements correlation with the
    // wavelet, matching the `wavelet.conj()` in the direct version.
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();

    fft_inverse.process(&mut result);

    // dt approximates the integral; 1/n undoes rustfft's unnormalised inverse.
    for c in result.iter_mut() {
        *c *= dt / n as f64;
    }

    result
}
624
625fn otsu_threshold(values: &[f64]) -> f64 {
630 if values.is_empty() {
631 return 0.5;
632 }
633
634 let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
636 if valid.is_empty() {
637 return 0.5;
638 }
639
640 let vmin = valid.iter().cloned().fold(f64::INFINITY, f64::min);
641 let vmax = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
642
643 if (vmax - vmin).abs() < 1e-10 {
644 return (vmin + vmax) / 2.0;
645 }
646
647 let n_bins = 256;
648 let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
649 let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
650
651 hist_min + (best_bin as f64 + 0.5) * bin_width
652}
653
/// Ordinary least-squares slope of `y` regressed on `x`.
///
/// Returns 0.0 for mismatched or too-short inputs and for degenerate
/// (constant) `x`.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    // Accumulate covariance numerator and x-variance denominator together.
    let (num, den) = x.iter().zip(y).fold((0.0, 0.0), |(num, den), (&xi, &yi)| {
        let dx = xi - mean_x;
        (num + dx * (yi - mean_y), den + dx * dx)
    });

    if den.abs() < 1e-15 {
        0.0
    } else {
        num / den
    }
}
678
/// Summary statistics computed by `analyze_amplitude_envelope`.
struct AmplitudeEnvelopeStats {
    // Coefficient of variation (std/mean) of the envelope.
    modulation_score: f64,
    // Normalised linear trend over the window, clamped to [-1, 1].
    amplitude_trend: f64,
    // True when the modulation score exceeds the caller's threshold.
    has_modulation: bool,
    // Classification derived from score and trend.
    modulation_type: ModulationType,
    // Mean amplitude (kept for completeness; currently unused).
    _mean_amp: f64,
    // Minimum amplitude over the window.
    min_amp: f64,
    // Maximum amplitude over the window.
    max_amp: f64,
}
689
/// Summarise a seasonal-amplitude envelope over the interior of the window.
///
/// `modulation_score` is the coefficient of variation (std/mean) of the
/// envelope; `amplitude_trend` is the least-squares slope times the time
/// span, normalised by the mean amplitude and clamped to [-1, 1]. The
/// modulation type is then classified from these two quantities.
///
/// NOTE(review): assumes `interior_envelope` and `interior_times` are
/// non-empty and equally long — an empty envelope would divide by zero
/// below; confirm callers guarantee this.
fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    let n_interior = interior_envelope.len() as f64;

    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    // Coefficient of variation of the envelope = modulation score
    // (zero when the mean amplitude is essentially zero).
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Least-squares slope of amplitude vs. time.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Total relative change over the window, clamped to [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    let has_modulation = modulation_score > modulation_threshold;
    // Strong trends classify as emerging/fading; otherwise modulation is
    // treated as oscillation around a stable level.
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
757
/// Fit a single sinusoid of the given `period` to `residual` and subtract
/// the fitted component in place.
///
/// The coefficients use the quasi-orthogonality of cos/sin on the grid
/// (`a = 2/m * <residual, cos>`, likewise for `b`), which is exact when the
/// grid spans whole periods. Returns `(a, b, amplitude, phase)` for the
/// model `a*cos(ωt) + b*sin(ωt)` with `ω = 2π/period`.
fn fit_and_subtract_sinusoid(
    residual: &mut [f64],
    argvals: &[f64],
    period: f64,
) -> (f64, f64, f64, f64) {
    let m = residual.len();
    let omega = 2.0 * PI / period;

    // Inner products of the residual with the cos/sin regressors.
    let mut cos_sum = 0.0;
    let mut sin_sum = 0.0;
    for (j, &t) in argvals.iter().enumerate() {
        cos_sum += residual[j] * (omega * t).cos();
        sin_sum += residual[j] * (omega * t).sin();
    }

    let a = 2.0 * cos_sum / m as f64;
    let b = 2.0 * sin_sum / m as f64;
    let amplitude = a.hypot(b);
    let phase = b.atan2(a);

    // Remove the fitted component from the residual in place.
    for (j, &t) in argvals.iter().enumerate() {
        residual[j] -= a * (omega * t).cos() + b * (omega * t).sin();
    }

    (a, b, amplitude, phase)
}
785
/// Accept a SAZED component period only when it is finite, lies strictly
/// inside `(min_period, max_period)`, and its confidence exceeds `threshold`.
fn validate_sazed_component(
    period: f64,
    confidence: f64,
    min_period: f64,
    max_period: f64,
    threshold: f64,
) -> Option<f64> {
    let in_range = period.is_finite() && period > min_period && period < max_period;
    (in_range && confidence > threshold).then_some(period)
}
800
/// Count the periods within relative `tolerance` of `reference` (relative to
/// the larger of the two values), returning `(count, sum_of_agreeing)`.
fn count_agreeing_periods(periods: &[f64], reference: f64, tolerance: f64) -> (usize, f64) {
    periods
        .iter()
        .filter(|&&p| (reference - p).abs() / reference.max(p) <= tolerance)
        .fold((0, 0.0), |(count, sum), &p| (count + 1, sum + p))
}
814
/// Index where the ACF's initial descent ends: the first negative lag, or
/// the lag just before the ACF first turns upward. Defaults to 1.
fn find_acf_descent_end(acf: &[f64]) -> usize {
    for (i, &v) in acf.iter().enumerate().skip(1) {
        if v < 0.0 {
            return i;
        }
        if i > 1 && v > acf[i - 1] {
            return i - 1;
        }
    }
    1
}
827
828fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
830 if acf.len() < 4 {
831 return None;
832 }
833
834 let min_search_start = find_acf_descent_end(acf);
835 let peaks = find_peaks_1d(&acf[min_search_start..], 1);
836 if peaks.is_empty() {
837 return None;
838 }
839
840 let peak_lag = peaks[0] + min_search_start;
841 Some((peak_lag, acf[peak_lag].max(0.0)))
842}
843
/// Per-cycle seasonal strength over consecutive whole periods of the record.
///
/// The window is split into `floor(span / period)` cycles; each cycle's
/// columns are refit with `seasonal_strength_variance` (3 harmonics).
/// Cycles with fewer than 4 grid points are marked NaN and skipped.
/// Returns the strengths plus the indices of cycles whose strength falls
/// below `strength_thresh` (a NaN strength never counts as weak).
fn compute_cycle_strengths(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    let (n, m) = data.shape();
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // Grid-index range [start_idx, end_idx) covering this cycle.
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        if cycle_m < 4 {
            // Too few points to fit harmonics: mark as undetermined.
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Repack this cycle's columns into a column-major n × cycle_m matrix.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[(i, j)]))
            .collect();
        let cycle_mat = FdMatrix::from_column_major(cycle_data, n, cycle_m).unwrap();
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        let strength = seasonal_strength_variance(&cycle_mat, &cycle_argvals, period, 3);

        cycle_strengths.push(strength);
        if strength < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    (cycle_strengths, weak_seasons)
}
888
/// Histogram of `valid` with `n_bins` equal-width bins spanning its range.
///
/// Returns `(counts, min_value, bin_width)`; the maximum value is clamped
/// into the last bin.
fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
    let min_val = valid.iter().copied().fold(f64::INFINITY, f64::min);
    let max_val = valid.iter().copied().fold(f64::NEG_INFINITY, f64::max);
    let bin_width = (max_val - min_val) / n_bins as f64;

    let mut counts = vec![0usize; n_bins];
    for &v in valid {
        // Clamp so v == max_val falls into the last bin instead of past it.
        let bin = ((v - min_val) / bin_width).min(n_bins as f64 - 1.0) as usize;
        counts[bin] += 1;
    }
    (counts, min_val, bin_width)
}
901
/// Otsu scan over a histogram: the bin index maximising the between-class
/// variance `w_b * w_f * (mu_b - mu_f)^2`, plus that maximum variance.
fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
    // Total bin-index mass, used to derive the foreground mean cheaply.
    let weighted_sum: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &c)| i as f64 * c as f64)
        .sum();

    let mut best = (0usize, 0.0f64);
    let mut background_sum = 0.0;
    let mut background_weight = 0.0;

    for (t, &count) in histogram.iter().enumerate() {
        background_weight += count as f64;
        if background_weight == 0.0 {
            continue; // no mass below the threshold yet
        }
        let foreground_weight = total - background_weight;
        if foreground_weight == 0.0 {
            break; // everything is background from here on
        }
        background_sum += t as f64 * count as f64;
        let mean_b = background_sum / background_weight;
        let mean_f = (weighted_sum - background_sum) / foreground_weight;
        let variance = background_weight * foreground_weight * (mean_b - mean_f).powi(2);
        if variance > best.1 {
            best = (t, variance);
        }
    }

    best
}
936
/// Power at (near-)integer harmonics of `fundamental_freq` versus total power.
///
/// The DC bin is skipped. A bin counts as seasonal when its frequency ratio
/// to the fundamental is within `tolerance` of an integer multiple >= 1.
/// Returns `(seasonal_power, total_power)`.
fn sum_harmonic_power(
    frequencies: &[f64],
    power: &[f64],
    fundamental_freq: f64,
    tolerance: f64,
) -> (f64, f64) {
    frequencies
        .iter()
        .zip(power)
        .skip(1) // ignore the DC bin
        .fold((0.0, 0.0), |(seasonal, total), (&freq, &p)| {
            let ratio = freq / fundamental_freq;
            let harmonic = ratio.round();
            let is_harmonic = harmonic >= 1.0 && (ratio - harmonic).abs() < tolerance;
            (
                if is_harmonic { seasonal + p } else { seasonal },
                total + p,
            )
        })
}
961
/// Decide whether index `i` constitutes a valid threshold crossing.
///
/// Returns `Some(new_state)` when the strength `ss` sits on the opposite
/// side of `threshold` from the current `in_seasonal` state and at least
/// `min_dur_points` samples have elapsed since the last accepted change.
/// Returns `None` otherwise, including for NaN strengths.
fn crossing_direction(
    ss: f64,
    threshold: f64,
    in_seasonal: bool,
    i: usize,
    last_change_idx: Option<usize>,
    min_dur_points: usize,
) -> Option<bool> {
    if ss.is_nan() {
        return None;
    }
    let now_seasonal = ss > threshold;
    let changed = now_seasonal != in_seasonal;
    let long_enough = last_change_idx.map_or(true, |last| i - last >= min_dur_points);
    (changed && long_enough).then_some(now_seasonal)
}
985
986fn build_change_point(
988 i: usize,
989 ss: f64,
990 now_seasonal: bool,
991 strength_curve: &[f64],
992 argvals: &[f64],
993) -> ChangePoint {
994 let change_type = if now_seasonal {
995 ChangeType::Onset
996 } else {
997 ChangeType::Cessation
998 };
999 let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
1000 strength_curve[i - 1]
1001 } else {
1002 ss
1003 };
1004 ChangePoint {
1005 time: argvals[i],
1006 change_type,
1007 strength_before,
1008 strength_after: ss,
1009 }
1010}
1011
1012fn detect_threshold_crossings(
1014 strength_curve: &[f64],
1015 argvals: &[f64],
1016 threshold: f64,
1017 min_dur_points: usize,
1018) -> Vec<ChangePoint> {
1019 let mut change_points = Vec::new();
1020 let mut in_seasonal = strength_curve[0] > threshold;
1021 let mut last_change_idx: Option<usize> = None;
1022
1023 for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
1024 let Some(now_seasonal) = crossing_direction(
1025 ss,
1026 threshold,
1027 in_seasonal,
1028 i,
1029 last_change_idx,
1030 min_dur_points,
1031 ) else {
1032 continue;
1033 };
1034
1035 change_points.push(build_change_point(
1036 i,
1037 ss,
1038 now_seasonal,
1039 strength_curve,
1040 argvals,
1041 ));
1042
1043 in_seasonal = now_seasonal;
1044 last_change_idx = Some(i);
1045 }
1046
1047 change_points
1048}
1049
1050pub fn estimate_period_fft(data: &FdMatrix, argvals: &[f64]) -> PeriodEstimate {
1068 let (n, m) = data.shape();
1069 if n == 0 || m < 4 || argvals.len() != m {
1070 return PeriodEstimate {
1071 period: f64::NAN,
1072 frequency: f64::NAN,
1073 power: 0.0,
1074 confidence: 0.0,
1075 };
1076 }
1077
1078 let mean_curve = compute_mean_curve(data);
1080
1081 let (frequencies, power) = periodogram(&mean_curve, argvals);
1082
1083 if frequencies.len() < 2 {
1084 return PeriodEstimate {
1085 period: f64::NAN,
1086 frequency: f64::NAN,
1087 power: 0.0,
1088 confidence: 0.0,
1089 };
1090 }
1091
1092 let mut max_power = 0.0;
1094 let mut max_idx = 1;
1095 for (i, &p) in power.iter().enumerate().skip(1) {
1096 if p > max_power {
1097 max_power = p;
1098 max_idx = i;
1099 }
1100 }
1101
1102 let dominant_freq = frequencies[max_idx];
1103 let period = if dominant_freq > 1e-15 {
1104 1.0 / dominant_freq
1105 } else {
1106 f64::INFINITY
1107 };
1108
1109 let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
1111 let confidence = if mean_power > 1e-15 {
1112 max_power / mean_power
1113 } else {
1114 0.0
1115 };
1116
1117 PeriodEstimate {
1118 period,
1119 frequency: dominant_freq,
1120 power: max_power,
1121 confidence,
1122 }
1123}
1124
1125pub fn estimate_period_acf(
1136 data: &[f64],
1137 n: usize,
1138 m: usize,
1139 argvals: &[f64],
1140 max_lag: usize,
1141) -> PeriodEstimate {
1142 if n == 0 || m < 4 || argvals.len() != m {
1143 return PeriodEstimate {
1144 period: f64::NAN,
1145 frequency: f64::NAN,
1146 power: 0.0,
1147 confidence: 0.0,
1148 };
1149 }
1150
1151 let mat = FdMatrix::from_slice(data, n, m).unwrap();
1153 let mean_curve = compute_mean_curve(&mat);
1154
1155 let acf = autocorrelation(&mean_curve, max_lag);
1156
1157 let min_lag = 2;
1159 let peaks = find_peaks_1d(&acf[min_lag..], 1);
1160
1161 if peaks.is_empty() {
1162 return PeriodEstimate {
1163 period: f64::NAN,
1164 frequency: f64::NAN,
1165 power: 0.0,
1166 confidence: 0.0,
1167 };
1168 }
1169
1170 let peak_lag = peaks[0] + min_lag;
1171 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1172 let period = peak_lag as f64 * dt;
1173 let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };
1174
1175 PeriodEstimate {
1176 period,
1177 frequency,
1178 power: acf[peak_lag],
1179 confidence: acf[peak_lag].abs(),
1180 }
1181}
1182
1183pub fn estimate_period_regression(
1198 data: &[f64],
1199 n: usize,
1200 m: usize,
1201 argvals: &[f64],
1202 period_min: f64,
1203 period_max: f64,
1204 n_candidates: usize,
1205 n_harmonics: usize,
1206) -> PeriodEstimate {
1207 if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
1208 return PeriodEstimate {
1209 period: f64::NAN,
1210 frequency: f64::NAN,
1211 power: 0.0,
1212 confidence: 0.0,
1213 };
1214 }
1215
1216 let mat = FdMatrix::from_slice(data, n, m).unwrap();
1218 let mean_curve = compute_mean_curve(&mat);
1219
1220 let nbasis = 1 + 2 * n_harmonics;
1221
1222 let candidates: Vec<f64> = (0..n_candidates)
1224 .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
1225 .collect();
1226
1227 let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
1228 .map(|&period| {
1229 let basis = fourier_basis_with_period(argvals, nbasis, period);
1230
1231 let mut rss = 0.0;
1233 for j in 0..m {
1234 let mut fitted = 0.0;
1235 for k in 0..nbasis {
1237 let b_val = basis[j + k * m];
1238 let coef: f64 = (0..m)
1239 .map(|l| mean_curve[l] * basis[l + k * m])
1240 .sum::<f64>()
1241 / (0..m)
1242 .map(|l| basis[l + k * m].powi(2))
1243 .sum::<f64>()
1244 .max(1e-15);
1245 fitted += coef * b_val;
1246 }
1247 let resid = mean_curve[j] - fitted;
1248 rss += resid * resid;
1249 }
1250
1251 (period, rss)
1252 })
1253 .collect();
1254
1255 let (best_period, min_rss) = results
1257 .iter()
1258 .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
1259 .cloned()
1260 .unwrap_or((f64::NAN, f64::INFINITY));
1261
1262 let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
1264 let confidence = if min_rss > 1e-15 {
1265 (mean_rss / min_rss).min(10.0)
1266 } else {
1267 10.0
1268 };
1269
1270 PeriodEstimate {
1271 period: best_period,
1272 frequency: if best_period > 1e-15 {
1273 1.0 / best_period
1274 } else {
1275 0.0
1276 },
1277 power: 1.0 - min_rss / mean_rss,
1278 confidence,
1279 }
1280}
1281
1282pub fn detect_multiple_periods(
1303 data: &[f64],
1304 n: usize,
1305 m: usize,
1306 argvals: &[f64],
1307 max_periods: usize,
1308 min_confidence: f64,
1309 min_strength: f64,
1310) -> Vec<DetectedPeriod> {
1311 if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
1312 return Vec::new();
1313 }
1314
1315 let mat = FdMatrix::from_slice(data, n, m).unwrap();
1317 let mean_curve = compute_mean_curve(&mat);
1318
1319 let mut residual = mean_curve.clone();
1320 let mut detected = Vec::with_capacity(max_periods);
1321
1322 for iteration in 1..=max_periods {
1323 match evaluate_next_period(
1324 &mut residual,
1325 m,
1326 argvals,
1327 min_confidence,
1328 min_strength,
1329 iteration,
1330 ) {
1331 Some(period) => detected.push(period),
1332 None => break,
1333 }
1334 }
1335
1336 detected
1337}
1338
1339fn evaluate_next_period(
1343 residual: &mut [f64],
1344 m: usize,
1345 argvals: &[f64],
1346 min_confidence: f64,
1347 min_strength: f64,
1348 iteration: usize,
1349) -> Option<DetectedPeriod> {
1350 let residual_mat = FdMatrix::from_slice(residual, 1, m).unwrap();
1351 let est = estimate_period_fft(&residual_mat, argvals);
1352
1353 if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
1354 return None;
1355 }
1356
1357 let strength = seasonal_strength_variance(&residual_mat, argvals, est.period, 3);
1358 if strength < min_strength || strength.is_nan() {
1359 return None;
1360 }
1361
1362 let (_a, _b, amplitude, phase) = fit_and_subtract_sinusoid(residual, argvals, est.period);
1363
1364 Some(DetectedPeriod {
1365 period: est.period,
1366 confidence: est.confidence,
1367 strength,
1368 amplitude,
1369 phase,
1370 iteration,
1371 })
1372}
1373
1374fn smooth_for_peaks(
1380 data: &FdMatrix,
1381 argvals: &[f64],
1382 smooth_first: bool,
1383 smooth_nbasis: Option<usize>,
1384) -> Vec<f64> {
1385 if !smooth_first {
1386 return data.as_slice().to_vec();
1387 }
1388 let nbasis = smooth_nbasis
1389 .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, argvals, 5, 25));
1390 if let Some(result) = crate::basis::fourier_fit_1d(data, argvals, nbasis) {
1391 result.fitted.into_vec()
1392 } else {
1393 data.as_slice().to_vec()
1394 }
1395}
1396
1397fn detect_peaks_single_curve(
1399 curve: &[f64],
1400 d1: &[f64],
1401 argvals: &[f64],
1402 min_dist_points: usize,
1403 min_prominence: Option<f64>,
1404 data_range: f64,
1405) -> (Vec<Peak>, Vec<f64>) {
1406 let m = curve.len();
1407 let mut peak_indices = Vec::new();
1408 for j in 1..m {
1409 if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
1410 let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
1411 j - 1
1412 } else {
1413 j
1414 };
1415
1416 if peak_indices.is_empty()
1417 || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
1418 {
1419 peak_indices.push(idx);
1420 }
1421 }
1422 }
1423
1424 let mut peaks: Vec<Peak> = peak_indices
1425 .iter()
1426 .map(|&idx| {
1427 let prominence = compute_prominence(curve, idx) / data_range;
1428 Peak {
1429 time: argvals[idx],
1430 value: curve[idx],
1431 prominence,
1432 }
1433 })
1434 .collect();
1435
1436 if let Some(min_prom) = min_prominence {
1437 peaks.retain(|p| p.prominence >= min_prom);
1438 }
1439
1440 let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();
1441
1442 (peaks, distances)
1443}
1444
/// Detect peaks in every curve of `data`.
///
/// Optionally smooths the data first (Fourier basis, GCV-selected size
/// unless `smooth_nbasis` is given), then finds peaks per curve from
/// first-derivative sign changes. `min_distance` (in `argvals` units)
/// enforces spacing between peaks; `min_prominence` (relative to the overall
/// data range) filters weak peaks. `mean_period` is the average gap between
/// consecutive peaks pooled over all curves (NaN when no gaps exist).
pub fn detect_peaks(
    data: &FdMatrix,
    argvals: &[f64],
    min_distance: Option<f64>,
    min_prominence: Option<f64>,
    smooth_first: bool,
    smooth_nbasis: Option<usize>,
) -> PeakDetectionResult {
    let (n, m) = data.shape();
    if n == 0 || m < 3 || argvals.len() != m {
        return PeakDetectionResult {
            peaks: Vec::new(),
            inter_peak_distances: Vec::new(),
            mean_period: f64::NAN,
        };
    }

    // Convert the time-based spacing into a number of grid points.
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);

    let work_data = smooth_for_peaks(data, argvals, smooth_first, smooth_nbasis);

    let work_mat = FdMatrix::from_column_major(work_data.clone(), n, m).unwrap();
    let deriv1 = deriv_1d(&work_mat, argvals, 1).into_vec();

    // Overall value range, used to normalise prominences
    // (floored to avoid division by zero on constant data).
    let data_range: f64 = {
        let mut min_val = f64::INFINITY;
        let mut max_val = f64::NEG_INFINITY;
        for &v in work_data.iter() {
            min_val = min_val.min(v);
            max_val = max_val.max(v);
        }
        (max_val - min_val).max(1e-15)
    };

    // Per-curve detection (parallel when the feature is enabled).
    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
        .map(|i| {
            // Gather row i from the column-major layout.
            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();
            detect_peaks_single_curve(
                &curve,
                &d1,
                argvals,
                min_dist_points,
                min_prominence,
                data_range,
            )
        })
        .collect();

    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();

    // Pool all inter-peak gaps to estimate a global period.
    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
    let mean_period = if all_distances.is_empty() {
        f64::NAN
    } else {
        all_distances.iter().sum::<f64>() / all_distances.len() as f64
    };

    PeakDetectionResult {
        peaks,
        inter_peak_distances,
        mean_period,
    }
}
1530
/// Variance-based seasonal strength of the mean curve, clamped to [0, 1].
///
/// Projects the mean curve onto `n_harmonics` Fourier harmonics of `period`
/// (coefficients via independent per-function projections; the constant
/// basis function is excluded) and returns the variance of the fitted
/// seasonal component divided by the total variance.
///
/// Returns NaN for degenerate inputs and 0.0 for (near-)constant data.
pub fn seasonal_strength_variance(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    n_harmonics: usize,
) -> f64 {
    let (n, m) = data.shape();
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return f64::NAN;
    }

    let mean_curve = compute_mean_curve(data);

    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let total_var: f64 = mean_curve
        .iter()
        .map(|&x| (x - global_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    // Constant curve: no variance to explain.
    if total_var < 1e-15 {
        return 0.0;
    }

    let nbasis = 1 + 2 * n_harmonics;
    let basis = fourier_basis_with_period(argvals, nbasis, period);

    // Build the seasonal fit, skipping k = 0 (the constant basis function).
    let mut seasonal = vec![0.0; m];
    for k in 1..nbasis {
        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
        if b_sum > 1e-15 {
            let coef: f64 = (0..m)
                .map(|j| mean_curve[j] * basis[j + k * m])
                .sum::<f64>()
                / b_sum;
            for j in 0..m {
                seasonal[j] += coef * basis[j + k * m];
            }
        }
    }

    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
    let seasonal_var: f64 = seasonal
        .iter()
        .map(|&x| (x - seasonal_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    // Explained (seasonal) variance relative to total variance.
    (seasonal_var / total_var).min(1.0)
}
1606
1607pub fn seasonal_strength_spectral(data: &FdMatrix, argvals: &[f64], period: f64) -> f64 {
1618 let (n, m) = data.shape();
1619 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1620 return f64::NAN;
1621 }
1622
1623 let mean_curve = compute_mean_curve(data);
1625
1626 let (frequencies, power) = periodogram(&mean_curve, argvals);
1627
1628 if frequencies.len() < 2 {
1629 return f64::NAN;
1630 }
1631
1632 let fundamental_freq = 1.0 / period;
1633 let (seasonal_power, total_power) =
1634 sum_harmonic_power(&frequencies, &power, fundamental_freq, 0.1);
1635
1636 if total_power < 1e-15 {
1637 return 0.0;
1638 }
1639
1640 (seasonal_power / total_power).min(1.0)
1641}
1642
1643pub fn seasonal_strength_wavelet(data: &FdMatrix, argvals: &[f64], period: f64) -> f64 {
1664 let (n, m) = data.shape();
1665 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1666 return f64::NAN;
1667 }
1668
1669 let mean_curve = compute_mean_curve(data);
1671
1672 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1674 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1675
1676 let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;
1678
1679 if total_variance < 1e-15 {
1680 return 0.0;
1681 }
1682
1683 let omega0 = 6.0;
1685 let scale = period * omega0 / (2.0 * PI);
1686 let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1687
1688 if wavelet_coeffs.is_empty() {
1689 return f64::NAN;
1690 }
1691
1692 let (interior_start, interior_end) = match interior_bounds(m) {
1694 Some(bounds) => bounds,
1695 None => return f64::NAN,
1696 };
1697
1698 let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
1699 .iter()
1700 .map(|c| c.norm_sqr())
1701 .sum::<f64>()
1702 / (interior_end - interior_start) as f64;
1703
1704 (wavelet_power / total_variance).sqrt().min(1.0)
1707}
1708
/// Sliding-window seasonal strength along the domain.
///
/// For every grid point, extracts a window of half-width `window_size / 2`
/// (in time units) around it from the mean curve and evaluates the chosen
/// strength `method` on that window. Returns one strength value per grid
/// point (`NaN` where the window holds fewer than 4 points), or an empty
/// vector for degenerate input.
pub fn seasonal_strength_windowed(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    window_size: f64,
    method: StrengthMethod,
) -> Vec<f64> {
    let (n, m) = data.shape();
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
        return Vec::new();
    }

    // Convert the half-window from time units to grid points
    // (assumes an evenly spaced grid).
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let half_window_points = ((window_size / 2.0) / dt).round() as usize;

    let mean_curve = compute_mean_curve(data);

    // One strength evaluation per window center; runs in parallel when the
    // "parallel" feature is enabled.
    iter_maybe_parallel!(0..m)
        .map(|center| {
            let start = center.saturating_sub(half_window_points);
            let end = (center + half_window_points + 1).min(m);
            let window_m = end - start;

            // Too few points for a meaningful strength estimate.
            if window_m < 4 {
                return f64::NAN;
            }

            let window_data: Vec<f64> = mean_curve[start..end].to_vec();
            let window_argvals: Vec<f64> = argvals[start..end].to_vec();

            // unwrap is safe: the vector length equals the requested
            // 1 x window_m column-major shape by construction.
            let single_mat = FdMatrix::from_column_major(window_data, 1, window_m).unwrap();

            match method {
                StrengthMethod::Variance => {
                    seasonal_strength_variance(&single_mat, &window_argvals, period, 3)
                }
                StrengthMethod::Spectral => {
                    seasonal_strength_spectral(&single_mat, &window_argvals, period)
                }
            }
        })
        .collect()
}
1767
1768pub fn detect_seasonality_changes(
1787 data: &FdMatrix,
1788 argvals: &[f64],
1789 period: f64,
1790 threshold: f64,
1791 window_size: f64,
1792 min_duration: f64,
1793) -> ChangeDetectionResult {
1794 let (n, m) = data.shape();
1795 if n == 0 || m < 4 || argvals.len() != m {
1796 return ChangeDetectionResult {
1797 change_points: Vec::new(),
1798 strength_curve: Vec::new(),
1799 };
1800 }
1801
1802 let strength_curve =
1804 seasonal_strength_windowed(data, argvals, period, window_size, StrengthMethod::Variance);
1805
1806 if strength_curve.is_empty() {
1807 return ChangeDetectionResult {
1808 change_points: Vec::new(),
1809 strength_curve: Vec::new(),
1810 };
1811 }
1812
1813 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1814 let min_dur_points = (min_duration / dt).round() as usize;
1815
1816 let change_points =
1817 detect_threshold_crossings(&strength_curve, argvals, threshold, min_dur_points);
1818
1819 ChangeDetectionResult {
1820 change_points,
1821 strength_curve,
1822 }
1823}
1824
/// Detects amplitude modulation of a seasonal signal via the Hilbert envelope.
///
/// First checks that the data are seasonal at all (spectral strength >=
/// `seasonality_threshold`); if so, computes the analytic-signal envelope of
/// the DC-removed mean curve, smooths it with a moving average of roughly one
/// period, and hands the interior of the smoothed envelope to
/// `analyze_amplitude_envelope` to classify the modulation. Non-seasonal or
/// degenerate input yields a `NonSeasonal` result; failure of the envelope or
/// interior-bounds steps yields a `Stable` result with no modulation.
pub fn detect_amplitude_modulation(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> AmplitudeModulationResult {
    let (n, m) = data.shape();
    // Default result for input that cannot be analyzed at all.
    let empty_result = AmplitudeModulationResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        strength_curve: Vec::new(),
        time_points: Vec::new(),
        min_strength: 0.0,
        max_strength: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Gate on overall spectral seasonality before looking for modulation.
    let overall_strength = seasonal_strength_spectral(data, argvals, period);

    if overall_strength < seasonality_threshold {
        return AmplitudeModulationResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data);

    // Amplitude envelope = |analytic signal| of the DC-removed mean curve.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
    let analytic = hilbert_transform(&detrended);
    let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();

    if envelope.is_empty() {
        // Seasonal, but the transform produced nothing usable.
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Moving-average smoothing over roughly one period (at least 3 points)
    // to suppress within-cycle envelope ripple.
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let smooth_window = ((period / dt) as usize).max(3);
    let half_window = smooth_window / 2;

    let smoothed_envelope: Vec<f64> = (0..m)
        .map(|i| {
            let start = i.saturating_sub(half_window);
            let end = (i + half_window + 1).min(m);
            let sum: f64 = envelope[start..end].iter().sum();
            sum / (end - start) as f64
        })
        .collect();

    // Only the interior is analyzed; the Hilbert envelope is unreliable at
    // the boundaries.
    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: envelope,
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    };

    let stats = analyze_amplitude_envelope(
        &smoothed_envelope[interior_start..interior_end],
        &argvals[interior_start..interior_end],
        modulation_threshold,
    );

    AmplitudeModulationResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation: stats.has_modulation,
        modulation_type: stats.modulation_type,
        modulation_score: stats.modulation_score,
        amplitude_trend: stats.amplitude_trend,
        strength_curve: envelope,
        time_points: argvals.to_vec(),
        min_strength: stats.min_amp,
        max_strength: stats.max_amp,
    }
}
1980
/// Wavelet variant of amplitude-modulation detection.
///
/// Like `detect_amplitude_modulation`, but the amplitude trace is the Morlet
/// wavelet magnitude at the scale matching `period` instead of the Hilbert
/// envelope. Non-seasonal or degenerate input yields a `NonSeasonal` result;
/// failure of the transform or interior-bounds steps yields a `Stable`
/// result with no modulation.
pub fn detect_amplitude_modulation_wavelet(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> WaveletAmplitudeResult {
    let (n, m) = data.shape();
    // Default result for input that cannot be analyzed at all.
    let empty_result = WaveletAmplitudeResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        wavelet_amplitude: Vec::new(),
        time_points: Vec::new(),
        scale: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Gate on overall spectral seasonality before looking for modulation.
    let overall_strength = seasonal_strength_spectral(data, argvals, period);

    if overall_strength < seasonality_threshold {
        return WaveletAmplitudeResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data);

    // Remove the DC component before the transform.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    // Morlet scale chosen so its pseudo-frequency matches `period`.
    let omega0 = 6.0;
    let scale = period * omega0 / (2.0 * PI);

    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);

    if wavelet_coeffs.is_empty() {
        // Seasonal, but the transform produced nothing usable.
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale,
        };
    }

    // Amplitude trace = wavelet coefficient magnitude at the seasonal scale.
    let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();

    // Only the interior is analyzed; wavelet coefficients suffer from edge
    // effects at the boundaries.
    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude,
            time_points: argvals.to_vec(),
            scale,
        };
    };

    let stats = analyze_amplitude_envelope(
        &wavelet_amplitude[interior_start..interior_end],
        &argvals[interior_start..interior_end],
        modulation_threshold,
    );

    WaveletAmplitudeResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation: stats.has_modulation,
        modulation_type: stats.modulation_type,
        modulation_score: stats.modulation_score,
        amplitude_trend: stats.amplitude_trend,
        wavelet_amplitude,
        time_points: argvals.to_vec(),
        scale,
    }
}
2109
/// Instantaneous period, frequency, and amplitude via the Hilbert transform.
///
/// Builds the analytic signal of the DC-removed mean curve, takes its
/// magnitude as the amplitude, and differentiates the unwrapped phase
/// (central differences inside, one-sided at the ends) to obtain the
/// instantaneous frequency. The period is the reciprocal frequency, with
/// `INFINITY` where the frequency is numerically zero. Empty vectors are
/// returned for degenerate input.
pub fn instantaneous_period(data: &FdMatrix, argvals: &[f64]) -> InstantaneousPeriod {
    let (n, m) = data.shape();
    if n == 0 || m < 4 || argvals.len() != m {
        return InstantaneousPeriod {
            period: Vec::new(),
            frequency: Vec::new(),
            amplitude: Vec::new(),
        };
    }

    let mean_curve = compute_mean_curve(data);

    // Remove the DC component so the analytic signal rotates about zero.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    let analytic = hilbert_transform(&detrended);

    // Envelope (instantaneous amplitude).
    let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();

    // Wrapped instantaneous phase in (-pi, pi].
    let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();

    let unwrapped_phase = unwrap_phase(&phase);

    // Phase derivative / (2*pi) = instantaneous frequency
    // (assumes an evenly spaced grid).
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let mut inst_freq = vec![0.0; m];

    // One-sided differences at the ends, central differences inside.
    // (m >= 4 here; the guards are kept defensively.)
    if m > 1 {
        inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
    }
    for j in 1..(m - 1) {
        inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
    }
    if m > 1 {
        inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
    }

    // Period = 1/|frequency|; infinite where the frequency vanishes.
    let period: Vec<f64> = inst_freq
        .iter()
        .map(|&f| {
            if f.abs() > 1e-10 {
                (1.0 / f).abs()
            } else {
                f64::INFINITY
            }
        })
        .collect();

    InstantaneousPeriod {
        period,
        frequency: inst_freq,
        amplitude,
    }
}
2185
/// Analyzes how peak timing drifts across seasonal cycles.
///
/// Detects peaks (with a minimum separation of 0.7 periods so that at most
/// one peak falls in each cycle), maps each peak time to its position within
/// a cycle (`normalized_timing` in [0, 1)), and summarizes the timing
/// distribution: mean, population standard deviation, range, a variability
/// score (std relative to 0.1, capped at 1), and the linear trend of timing
/// versus cycle index. Only the first curve's peaks are analyzed. All
/// summary fields are `NaN` when input is degenerate or no peaks are found.
pub fn analyze_peak_timing(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    smooth_nbasis: Option<usize>,
) -> PeakTimingResult {
    let (n, m) = data.shape();
    if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
        return PeakTimingResult {
            peak_times: Vec::new(),
            peak_values: Vec::new(),
            normalized_timing: Vec::new(),
            mean_timing: f64::NAN,
            std_timing: f64::NAN,
            range_timing: f64::NAN,
            variability_score: f64::NAN,
            timing_trend: f64::NAN,
            cycle_indices: Vec::new(),
        };
    }

    // Enforce at most one detected peak per cycle.
    let min_distance = period * 0.7;
    // NOTE(review): the `None, true` positional arguments to detect_peaks
    // (prominence and a boolean flag) are taken on trust — confirm against
    // its signature, which is outside this view.
    let peaks = detect_peaks(
        data,
        argvals,
        Some(min_distance),
        None, true, smooth_nbasis,
    );

    // Timing analysis uses the first curve only.
    let sample_peaks = if peaks.peaks.is_empty() {
        Vec::new()
    } else {
        peaks.peaks[0].clone()
    };

    if sample_peaks.is_empty() {
        return PeakTimingResult {
            peak_times: Vec::new(),
            peak_values: Vec::new(),
            normalized_timing: Vec::new(),
            mean_timing: f64::NAN,
            std_timing: f64::NAN,
            range_timing: f64::NAN,
            variability_score: f64::NAN,
            timing_trend: f64::NAN,
            cycle_indices: Vec::new(),
        };
    }

    let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
    let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();

    // Position of each peak within its cycle, as a fraction of the period.
    let t_start = argvals[0];
    let normalized_timing: Vec<f64> = peak_times
        .iter()
        .map(|&t| {
            let cycle_pos = (t - t_start) % period;
            cycle_pos / period
        })
        .collect();

    // 1-based index of the cycle each peak falls into.
    let cycle_indices: Vec<usize> = peak_times
        .iter()
        .map(|&t| ((t - t_start) / period).floor() as usize + 1)
        .collect();

    let n_peaks = normalized_timing.len() as f64;
    let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;

    // Population variance of the within-cycle timings.
    let variance: f64 = normalized_timing
        .iter()
        .map(|&x| (x - mean_timing).powi(2))
        .sum::<f64>()
        / n_peaks;
    let std_timing = variance.sqrt();

    let min_timing = normalized_timing
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_timing = normalized_timing
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);
    let range_timing = max_timing - min_timing;

    // Std of 0.1 (10% of a cycle) or more saturates the score at 1.
    let variability_score = (std_timing / 0.1).min(1.0);

    // Systematic drift: slope of normalized timing against cycle index.
    let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
    let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);

    PeakTimingResult {
        peak_times,
        peak_values,
        normalized_timing,
        mean_timing,
        std_timing,
        range_timing,
        variability_score,
        timing_trend,
        cycle_indices,
    }
}
2322
/// Classifies the seasonality of a functional data set.
///
/// Combines the overall variance-based strength, per-cycle strengths
/// (via `compute_cycle_strengths`), and peak-timing stability into one of:
/// `NonSeasonal` (overall strength below threshold),
/// `IntermittentSeasonal` (more than 30% of cycles weak),
/// `VariableTiming` (peak timing std above `timing_threshold`), or
/// `StableSeasonal`. Defaults: strength threshold 0.3, timing threshold 0.05.
pub fn classify_seasonality(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    strength_threshold: Option<f64>,
    timing_threshold: Option<f64>,
) -> SeasonalityClassification {
    let strength_thresh = strength_threshold.unwrap_or(0.3);
    let timing_thresh = timing_threshold.unwrap_or(0.05);

    let (n, m) = data.shape();
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return SeasonalityClassification {
            is_seasonal: false,
            has_stable_timing: false,
            timing_variability: f64::NAN,
            seasonal_strength: f64::NAN,
            cycle_strengths: Vec::new(),
            weak_seasons: Vec::new(),
            classification: SeasonalType::NonSeasonal,
            peak_timing: None,
        };
    }

    // Overall strength from the Fourier-fit variance ratio (3 harmonics).
    let overall_strength = seasonal_strength_variance(data, argvals, period, 3);

    // Per-cycle strengths plus the cycles falling below the threshold.
    let (cycle_strengths, weak_seasons) =
        compute_cycle_strengths(data, argvals, period, strength_thresh);
    let n_cycles = cycle_strengths.len();

    let peak_timing = analyze_peak_timing(data, argvals, period, None);

    let is_seasonal = overall_strength >= strength_thresh;
    // NOTE: both comparisons are false when the timing stats are NaN
    // (no peaks found), so NaN timing reads as "not stable".
    let has_stable_timing = peak_timing.std_timing <= timing_thresh;
    let timing_variability = peak_timing.variability_score;

    // Decision cascade: seasonal at all -> intermittent -> timing stability.
    let n_weak = weak_seasons.len();
    let classification = if !is_seasonal {
        SeasonalType::NonSeasonal
    } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
        SeasonalType::IntermittentSeasonal
    } else if !has_stable_timing {
        SeasonalType::VariableTiming
    } else {
        SeasonalType::StableSeasonal
    };

    SeasonalityClassification {
        is_seasonal,
        has_stable_timing,
        timing_variability,
        seasonal_strength: overall_strength,
        cycle_strengths,
        weak_seasons,
        classification,
        peak_timing: Some(peak_timing),
    }
}
2409
2410pub fn detect_seasonality_changes_auto(
2424 data: &FdMatrix,
2425 argvals: &[f64],
2426 period: f64,
2427 threshold_method: ThresholdMethod,
2428 window_size: f64,
2429 min_duration: f64,
2430) -> ChangeDetectionResult {
2431 let (n, m) = data.shape();
2432 if n == 0 || m < 4 || argvals.len() != m {
2433 return ChangeDetectionResult {
2434 change_points: Vec::new(),
2435 strength_curve: Vec::new(),
2436 };
2437 }
2438
2439 let strength_curve =
2441 seasonal_strength_windowed(data, argvals, period, window_size, StrengthMethod::Variance);
2442
2443 if strength_curve.is_empty() {
2444 return ChangeDetectionResult {
2445 change_points: Vec::new(),
2446 strength_curve: Vec::new(),
2447 };
2448 }
2449
2450 let threshold = match threshold_method {
2452 ThresholdMethod::Fixed(t) => t,
2453 ThresholdMethod::Percentile(p) => {
2454 let mut sorted: Vec<f64> = strength_curve
2455 .iter()
2456 .copied()
2457 .filter(|x| x.is_finite())
2458 .collect();
2459 sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2460 if sorted.is_empty() {
2461 0.5
2462 } else {
2463 let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2464 sorted[idx.min(sorted.len() - 1)]
2465 }
2466 }
2467 ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2468 };
2469
2470 detect_seasonality_changes(data, argvals, period, threshold, window_size, min_duration)
2472}
2473
/// Result of the SAZED ensemble period estimator.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Consensus period across the component estimators (NaN when none).
    pub period: f64,
    /// Fraction of the five components agreeing with the consensus (0..=1).
    pub confidence: f64,
    /// Period estimate produced by each individual component.
    pub component_periods: SazedComponents,
    /// Number of components that agree with the consensus period.
    pub agreeing_components: usize,
}
2486
/// Per-component period estimates from the SAZED ensemble.
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Period from the dominant periodogram peak.
    pub spectral: f64,
    /// Period from the first autocorrelation peak.
    pub acf_peak: f64,
    /// Weighted average period over all ACF peaks (harmonics folded back).
    pub acf_average: f64,
    /// Period from the spacing of ACF zero crossings.
    pub zero_crossing: f64,
    /// Spectral period of the first-differenced signal.
    pub spectral_diff: f64,
}
2501
/// SAZED ensemble period estimation on a single series.
///
/// Runs five independent estimators (periodogram peak, first ACF peak,
/// ACF-peak average, ACF zero crossings, and periodogram of the differenced
/// series), keeps the ones that pass their per-component confidence and
/// plausibility checks, and returns the consensus period of the survivors.
/// `tolerance` is the agreement tolerance passed to the consensus step
/// (default 0.05).
///
/// Returns `NaN` period / zero confidence when input is degenerate (fewer
/// than 8 points or mismatched `argvals`) or fewer than two components are
/// confident.
pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
    let n = data.len();
    let tol = tolerance.unwrap_or(0.05);
    if n < 8 || argvals.len() != n {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: SazedComponents {
                spectral: f64::NAN,
                acf_peak: f64::NAN,
                acf_average: f64::NAN,
                zero_crossing: f64::NAN,
                spectral_diff: f64::NAN,
            },
            agreeing_components: 0,
        };
    }

    // Sampling interval (assumes an evenly spaced grid) and ACF depth.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);
    let signal_range = argvals[n - 1] - argvals[0];

    // Plausibility window: at least ~3 samples per cycle, and at least two
    // full cycles within the observed range.
    let min_period = signal_range / (n as f64 / 3.0);
    let max_period = signal_range / 2.0;

    // Component 1: dominant periodogram peak.
    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);

    // Component 2: first ACF peak.
    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);

    // Component 3: weighted average over all ACF peaks.
    let acf_average_period = sazed_acf_average(data, dt, max_lag);

    // Component 4: ACF zero-crossing spacing.
    let (zero_crossing_period, zero_crossing_conf) =
        sazed_zero_crossing_with_confidence(data, dt, max_lag);

    // Component 5: periodogram of the first-differenced series.
    let (spectral_diff_period, spectral_diff_conf) =
        sazed_spectral_diff_with_confidence(data, argvals);

    let components = SazedComponents {
        spectral: spectral_period,
        acf_peak: acf_peak_period,
        acf_average: acf_average_period,
        zero_crossing: zero_crossing_period,
        spectral_diff: spectral_diff_period,
    };

    // Per-component confidence thresholds. Units differ by estimator:
    // spectral confidences are power ratios over the noise floor, ACF
    // confidences are correlation values, zero-crossing confidence is a
    // consistency score in [0, 1].
    let spectral_thresh = 8.0;
    let acf_thresh = 0.3;
    let zero_crossing_thresh = 0.9;
    let spectral_diff_thresh = 6.0;
    let min_confident_components = 2;

    // Keep only components that are confident and within [min, max] period.
    // Note: the ACF-average component is gated on the ACF-peak confidence,
    // since it has no confidence of its own.
    let confident_periods: Vec<f64> = [
        validate_sazed_component(
            spectral_period,
            spectral_conf,
            min_period,
            max_period,
            spectral_thresh,
        ),
        validate_sazed_component(
            acf_peak_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            acf_average_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            zero_crossing_period,
            zero_crossing_conf,
            min_period,
            max_period,
            zero_crossing_thresh,
        ),
        validate_sazed_component(
            spectral_diff_period,
            spectral_diff_conf,
            min_period,
            max_period,
            spectral_diff_thresh,
        ),
    ]
    .into_iter()
    .flatten()
    .collect();

    if confident_periods.len() < min_confident_components {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: components,
            agreeing_components: confident_periods.len(),
        };
    }

    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
    // Confidence is the fraction of the five components that agree.
    let confidence = agreeing_count as f64 / 5.0;

    SazedResult {
        period: consensus_period,
        confidence,
        component_periods: components,
        agreeing_components: agreeing_count,
    }
}
2649
2650pub fn sazed_fdata(data: &FdMatrix, argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
2661 let (n, m) = data.shape();
2662 if n == 0 || m < 8 || argvals.len() != m {
2663 return SazedResult {
2664 period: f64::NAN,
2665 confidence: 0.0,
2666 component_periods: SazedComponents {
2667 spectral: f64::NAN,
2668 acf_peak: f64::NAN,
2669 acf_average: f64::NAN,
2670 zero_crossing: f64::NAN,
2671 spectral_diff: f64::NAN,
2672 },
2673 agreeing_components: 0,
2674 };
2675 }
2676
2677 let mean_curve = compute_mean_curve(data);
2678 sazed(&mean_curve, argvals, tolerance)
2679}
2680
2681fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2683 let (frequencies, power) = periodogram(data, argvals);
2684
2685 if frequencies.len() < 3 {
2686 return (f64::NAN, 0.0);
2687 }
2688
2689 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2691
2692 if power_no_dc.is_empty() {
2693 return (f64::NAN, 0.0);
2694 }
2695
2696 let mut sorted_power = power_no_dc.clone();
2698 sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2699 let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2700
2701 let mut max_idx = 0;
2703 let mut max_val = 0.0;
2704 for (i, &p) in power_no_dc.iter().enumerate() {
2705 if p > max_val {
2706 max_val = p;
2707 max_idx = i;
2708 }
2709 }
2710
2711 let power_ratio = max_val / noise_floor;
2712 let freq = frequencies[max_idx + 1];
2713
2714 if freq > 1e-15 {
2715 (1.0 / freq, power_ratio)
2716 } else {
2717 (f64::NAN, 0.0)
2718 }
2719}
2720
2721fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2723 let acf = autocorrelation(data, max_lag);
2724
2725 match find_first_acf_peak(&acf) {
2726 Some((peak_lag, acf_value)) => (peak_lag as f64 * dt, acf_value),
2727 None => (f64::NAN, 0.0),
2728 }
2729}
2730
/// ACF-average SAZED component: a confidence-weighted average over all ACF
/// peaks.
///
/// Later peaks are folded back to the fundamental by dividing each peak lag
/// by its nearest harmonic number (relative to the first peak), then all
/// estimates are averaged with the non-negative ACF value as the weight.
/// Returns the averaged period in time units, or `NaN` when the ACF is too
/// short or has no peaks.
fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
    let acf = autocorrelation(data, max_lag);

    if acf.len() < 4 {
        return f64::NAN;
    }

    // Peaks of the ACF excluding lag 0 (indices are offsets into acf[1..]).
    let peaks = find_peaks_1d(&acf[1..], 1);

    if peaks.is_empty() {
        return f64::NAN;
    }

    let mut weighted_sum = 0.0;
    let mut weight_sum = 0.0;

    for (i, &peak_idx) in peaks.iter().enumerate() {
        let lag = peak_idx + 1; // convert back to a lag in `acf`
        let weight = acf[lag].max(0.0); // negative correlations carry no weight

        if i == 0 {
            // The first peak is taken as the fundamental itself.
            weighted_sum += lag as f64 * weight;
            weight_sum += weight;
        } else {
            // Fold a presumed harmonic back to a fundamental estimate:
            // harmonic = round(lag / fundamental_lag), via +0.5 + truncation.
            let expected_fundamental = peaks[0] + 1;
            let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
            if harmonic > 0 {
                let fundamental_est = lag as f64 / harmonic as f64;
                weighted_sum += fundamental_est * weight;
                weight_sum += weight;
            }
        }
    }

    if weight_sum > 1e-15 {
        // Convert from lag units to time units.
        weighted_sum / weight_sum * dt
    } else {
        f64::NAN
    }
}
2776
/// Zero-crossing SAZED component: period from the spacing of ACF sign
/// changes.
///
/// Consecutive sign changes of the ACF are located with linear interpolation;
/// each gap approximates half a period. Returns
/// (2 * median half-period in time units, consistency), where consistency in
/// [0, 1] is one minus the coefficient of variation of the gaps. (NaN, 0.0)
/// when the ACF is too short or has fewer than two crossings.
fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
    let acf = autocorrelation(data, max_lag);

    if acf.len() < 4 {
        return (f64::NAN, 0.0);
    }

    // Sign changes with sub-lag refinement by linear interpolation.
    // (Exact zeros do not register as a crossing — product is not < 0.)
    let mut crossings = Vec::new();
    for i in 1..acf.len() {
        if acf[i - 1] * acf[i] < 0.0 {
            let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
            crossings.push((i - 1) as f64 + frac);
        }
    }

    if crossings.len() < 2 {
        return (f64::NAN, 0.0);
    }

    // Gaps between consecutive crossings: each is roughly half a period.
    let mut half_periods = Vec::new();
    for i in 1..crossings.len() {
        half_periods.push(crossings[i] - crossings[i - 1]);
    }

    if half_periods.is_empty() {
        return (f64::NAN, 0.0);
    }

    // Consistency: 1 - coefficient of variation, clamped to [0, 1].
    let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
    let variance: f64 =
        half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
    let std = variance.sqrt();
    let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);

    // The median gap is robust to the occasional spurious crossing.
    half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let median_half = half_periods[half_periods.len() / 2];

    (2.0 * median_half * dt, consistency)
}
2825
2826fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2828 if data.len() < 4 {
2829 return (f64::NAN, 0.0);
2830 }
2831
2832 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2834 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2835
2836 sazed_spectral_with_confidence(&diff, &diff_argvals)
2837}
2838
/// Indices of local maxima in a power spectrum that rise above twice the
/// median (noise-floor) power, sorted by descending peak power. Endpoints
/// are never peaks; fewer than 3 bins yields no peaks.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    if power.len() < 3 {
        return Vec::new();
    }

    // Noise floor = median power; require peaks to exceed twice that.
    let mut sorted_power: Vec<f64> = power.to_vec();
    sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let threshold = sorted_power[sorted_power.len() / 2] * 2.0;

    // Strict local maxima on the interior, above the threshold.
    let mut peaks: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();

    // Strongest peaks first.
    peaks.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));

    peaks.into_iter().map(|(idx, _)| idx).collect()
}
2864
/// Finds the period that the most candidate periods agree with.
///
/// For every candidate, `count_agreeing_periods` reports how many candidates
/// agree with it (and the sum of their values; agreement presumably within
/// `tolerance` relative — confirm against that helper). The mean of the
/// largest agreeing group is returned together with its size; ties on group
/// size are broken in favor of the group with the larger mean period.
fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
    if periods.is_empty() {
        return (f64::NAN, 0);
    }
    if periods.len() == 1 {
        return (periods[0], 1);
    }

    let mut best_period = periods[0];
    let mut best_count = 0;
    let mut best_sum = 0.0;

    for &p1 in periods {
        // (number of agreeing candidates, sum of their periods)
        let (count, sum) = count_agreeing_periods(periods, p1, tolerance);

        // Prefer more agreement; on equal counts prefer the larger mean.
        // (.max(1) guards the division on the very first comparison,
        // when best_count is still 0.)
        if count > best_count
            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64)
        {
            best_count = count;
            best_period = sum / count as f64;
            best_sum = sum;
        }
    }

    (best_period, best_count)
}
2892
/// Result of the autoperiod (FFT candidates + ACF validation) estimator.
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Best period estimate (NaN when nothing was detected).
    pub period: f64,
    /// Combined score of the best candidate (normalized power x ACF score).
    pub confidence: f64,
    /// Raw periodogram power at the best candidate's spectral peak.
    pub fft_power: f64,
    /// ACF validation score of the best candidate.
    pub acf_validation: f64,
    /// All evaluated candidates.
    pub candidates: Vec<AutoperiodCandidate>,
}
2907
/// A single candidate period considered by the autoperiod estimator.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Candidate period after gradient refinement against the ACF.
    pub period: f64,
    /// Periodogram power at the originating spectral peak.
    pub fft_power: f64,
    /// ACF validation score at the refined period.
    pub acf_score: f64,
    /// Normalized spectral power times the ACF score; used for ranking.
    pub combined_score: f64,
}
2920
2921fn empty_autoperiod_result() -> AutoperiodResult {
2922 AutoperiodResult {
2923 period: f64::NAN,
2924 confidence: 0.0,
2925 fft_power: 0.0,
2926 acf_validation: 0.0,
2927 candidates: Vec::new(),
2928 }
2929}
2930
2931fn build_autoperiod_candidate(
2933 peak_idx: usize,
2934 frequencies: &[f64],
2935 power_no_dc: &[f64],
2936 acf: &[f64],
2937 dt: f64,
2938 steps: usize,
2939 total_power: f64,
2940) -> Option<AutoperiodCandidate> {
2941 let freq = frequencies[peak_idx + 1];
2942 if freq < 1e-15 {
2943 return None;
2944 }
2945 let fft_power = power_no_dc[peak_idx];
2946 let normalized_power = fft_power / total_power.max(1e-15);
2947 let refined_period = refine_period_gradient(acf, 1.0 / freq, dt, steps);
2948 let refined_acf_score = validate_period_acf(acf, refined_period, dt);
2949 Some(AutoperiodCandidate {
2950 period: refined_period,
2951 fft_power,
2952 acf_score: refined_acf_score,
2953 combined_score: normalized_power * refined_acf_score,
2954 })
2955}
2956
/// Autoperiod: period detection via FFT candidates validated on the ACF.
///
/// Finds local peaks in the (DC-free) periodogram, refines each candidate
/// period by hill-climbing the ACF validation score, and ranks candidates by
/// normalized spectral power times ACF score. `n_candidates` caps how many
/// spectral peaks are considered (default 5); `gradient_steps` is the number
/// of refinement iterations (default 10).
///
/// Returns the empty result when input is degenerate (fewer than 8 points or
/// mismatched `argvals`) or no usable spectral peak or candidate exists.
pub fn autoperiod(
    data: &[f64],
    argvals: &[f64],
    n_candidates: Option<usize>,
    gradient_steps: Option<usize>,
) -> AutoperiodResult {
    let n = data.len();
    let max_candidates = n_candidates.unwrap_or(5);
    let steps = gradient_steps.unwrap_or(10);

    if n < 8 || argvals.len() != n {
        return empty_autoperiod_result();
    }

    // Sampling interval (assumes an evenly spaced grid) and ACF depth.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);

    let (frequencies, power) = periodogram(data, argvals);

    if frequencies.len() < 3 {
        return empty_autoperiod_result();
    }

    // Spectral peaks over the DC-free power spectrum (indices are offset by
    // one relative to `frequencies`).
    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
    let peak_indices = find_spectral_peaks(&power_no_dc);

    if peak_indices.is_empty() {
        return empty_autoperiod_result();
    }

    let acf = autocorrelation(data, max_lag);

    // Build at most `max_candidates` candidates, each refined and scored
    // against the ACF.
    let total_power: f64 = power_no_dc.iter().sum();
    let candidates: Vec<AutoperiodCandidate> = peak_indices
        .iter()
        .take(max_candidates)
        .filter_map(|&peak_idx| {
            build_autoperiod_candidate(
                peak_idx,
                &frequencies,
                &power_no_dc,
                &acf,
                dt,
                steps,
                total_power,
            )
        })
        .collect();

    if candidates.is_empty() {
        return empty_autoperiod_result();
    }

    // Winner: highest combined (power x ACF) score.
    // unwrap is safe: `candidates` is non-empty here.
    let best = candidates
        .iter()
        .max_by(|a, b| {
            a.combined_score
                .partial_cmp(&b.combined_score)
                .unwrap_or(std::cmp::Ordering::Equal)
        })
        .unwrap();

    AutoperiodResult {
        period: best.period,
        confidence: best.combined_score,
        fft_power: best.fft_power,
        acf_validation: best.acf_score,
        candidates,
    }
}
3052
3053pub fn autoperiod_fdata(
3055 data: &FdMatrix,
3056 argvals: &[f64],
3057 n_candidates: Option<usize>,
3058 gradient_steps: Option<usize>,
3059) -> AutoperiodResult {
3060 let (n, m) = data.shape();
3061 if n == 0 || m < 8 || argvals.len() != m {
3062 return AutoperiodResult {
3063 period: f64::NAN,
3064 confidence: 0.0,
3065 fft_power: 0.0,
3066 acf_validation: 0.0,
3067 candidates: Vec::new(),
3068 };
3069 }
3070
3071 let mean_curve = compute_mean_curve(data);
3072 autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3073}
3074
/// Scores a candidate period against the autocorrelation function.
///
/// Base score is the (non-negative) ACF value at the rounded lag. The score
/// is halved when half the lag correlates almost as strongly (suspected
/// half-period alias: half-lag ACF > 0.7x the lag ACF) and boosted by 1.2x
/// when the double lag is also clearly positive (> 0.3). Clipped to at most
/// 1.0; zero for lag 0 or out-of-range lags.
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    let lag = (period / dt).round() as usize;

    let Some(&acf_at_lag) = acf.get(lag) else {
        return 0.0; // lag beyond the computed ACF
    };
    if lag == 0 {
        return 0.0;
    }

    let mut score = acf_at_lag.max(0.0);

    // Penalize a suspected half-period alias.
    let half_lag = lag / 2;
    if half_lag > 0 {
        if let Some(&half_acf) = acf.get(half_lag) {
            if half_acf > acf_at_lag * 0.7 {
                score *= 0.5;
            }
        }
    }

    // Reward support at the double lag (a true period repeats there).
    if let Some(&double_acf) = acf.get(lag * 2) {
        if double_acf > 0.3 {
            score *= 1.2;
        }
    }

    score.min(1.0)
}
3113
3114fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3116 let mut period = initial_period;
3117 let step_size = dt * 0.5; for _ in 0..steps {
3120 let current_score = validate_period_acf(acf, period, dt);
3121 let left_score = validate_period_acf(acf, period - step_size, dt);
3122 let right_score = validate_period_acf(acf, period + step_size, dt);
3123
3124 if left_score > current_score && left_score > right_score {
3125 period -= step_size;
3126 } else if right_score > current_score {
3127 period += step_size;
3128 }
3129 }
3131
3132 period.max(dt) }
3134
/// Result of CFD-Autoperiod detection: spectral candidates from the
/// differenced series, clustered and cross-validated against the ACF.
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Best detected period (NaN when detection failed).
    pub period: f64,
    /// Confidence of the best period (ACF score times cluster power).
    pub confidence: f64,
    /// ACF validation score of the best period.
    pub acf_validation: f64,
    /// All validated candidate periods, ranked best first.
    pub periods: Vec<f64>,
    /// Confidence for each entry of `periods`, same order.
    pub confidences: Vec<f64>,
}
3149
/// Turn spectral peaks into (period, normalized power) candidate pairs.
///
/// `power_no_dc` is the periodogram with the DC bin dropped, so peak index
/// `k` corresponds to `frequencies[k + 1]`. Peaks at (numerically) zero
/// frequency are discarded; power is normalized by the total spectral power.
fn generate_cfd_candidates(
    frequencies: &[f64],
    power_no_dc: &[f64],
    peak_indices: &[usize],
) -> Vec<(f64, f64)> {
    let total: f64 = power_no_dc.iter().sum();
    let denom = total.max(1e-15);

    let mut out = Vec::with_capacity(peak_indices.len());
    for &idx in peak_indices {
        let freq = frequencies[idx + 1];
        if freq > 1e-15 {
            out.push((1.0 / freq, power_no_dc[idx] / denom));
        }
    }
    out
}
3171
3172fn validate_cfd_candidates(clusters: &[(f64, f64)], acf: &[f64], dt: f64) -> Vec<(f64, f64, f64)> {
3174 clusters
3175 .iter()
3176 .filter_map(|&(center, power_sum)| {
3177 let acf_score = validate_period_acf(acf, center, dt);
3178 if acf_score > 0.1 {
3179 Some((center, acf_score, power_sum))
3180 } else {
3181 None
3182 }
3183 })
3184 .collect()
3185}
3186
3187fn validate_or_fallback_cfd(
3189 validated: Vec<(f64, f64, f64)>,
3190 candidates: &[(f64, f64)],
3191 tol: f64,
3192 min_size: usize,
3193) -> Vec<(f64, f64, f64)> {
3194 if !validated.is_empty() {
3195 return validated;
3196 }
3197 cluster_periods(candidates, tol, min_size)
3199 .into_iter()
3200 .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3201 .map(|(center, power_sum)| vec![(center, 0.0, power_sum)])
3202 .unwrap_or_default()
3203}
3204
/// Rank validated (period, acf_score, power) triples by combined score.
///
/// Candidates are sorted descending by `acf_score * power`; returns the
/// ranked periods, their combined-score confidences, and the ACF score of
/// the top candidate.
///
/// Robustness fix: the previous version indexed `sorted[0]` unconditionally
/// and panicked on an empty slice; an empty input now yields empty vectors
/// and a 0.0 top score.
fn rank_cfd_results(validated: &[(f64, f64, f64)]) -> (Vec<f64>, Vec<f64>, f64) {
    let mut sorted: Vec<_> = validated.to_vec();
    sorted.sort_by(|a, b| {
        (b.1 * b.2)
            .partial_cmp(&(a.1 * a.2))
            .unwrap_or(std::cmp::Ordering::Equal)
    });

    // Tolerate an empty candidate list instead of panicking.
    let top_acf = sorted.first().map_or(0.0, |v| v.1);
    let periods = sorted.iter().map(|v| v.0).collect();
    let confidences = sorted.iter().map(|v| v.1 * v.2).collect();
    (periods, confidences, top_acf)
}
3219
3220fn empty_cfd_result() -> CfdAutoperiodResult {
3221 CfdAutoperiodResult {
3222 period: f64::NAN,
3223 confidence: 0.0,
3224 acf_validation: 0.0,
3225 periods: Vec::new(),
3226 confidences: Vec::new(),
3227 }
3228}
3229
3230fn extract_cfd_spectral_candidates(data: &[f64], argvals: &[f64]) -> Option<Vec<(f64, f64)>> {
3232 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3233 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3234 let (frequencies, power) = periodogram(&diff, &diff_argvals);
3235 if frequencies.len() < 3 {
3236 return None;
3237 }
3238 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3239 let peak_indices = find_spectral_peaks(&power_no_dc);
3240 if peak_indices.is_empty() {
3241 return None;
3242 }
3243 let candidates = generate_cfd_candidates(&frequencies, &power_no_dc, &peak_indices);
3244 if candidates.is_empty() {
3245 None
3246 } else {
3247 Some(candidates)
3248 }
3249}
3250
3251pub fn cfd_autoperiod(
3272 data: &[f64],
3273 argvals: &[f64],
3274 cluster_tolerance: Option<f64>,
3275 min_cluster_size: Option<usize>,
3276) -> CfdAutoperiodResult {
3277 let n = data.len();
3278 let tol = cluster_tolerance.unwrap_or(0.1);
3279 let min_size = min_cluster_size.unwrap_or(1);
3280
3281 if n < 8 || argvals.len() != n {
3282 return empty_cfd_result();
3283 }
3284
3285 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3286 let max_lag = (n / 2).max(4);
3287
3288 let Some(candidates) = extract_cfd_spectral_candidates(data, argvals) else {
3289 return empty_cfd_result();
3290 };
3291
3292 let clusters = cluster_periods(&candidates, tol, min_size);
3293 if clusters.is_empty() {
3294 return empty_cfd_result();
3295 }
3296
3297 let acf = autocorrelation(data, max_lag);
3298 let validated = validate_cfd_candidates(&clusters, &acf, dt);
3299 let validated = validate_or_fallback_cfd(validated, &candidates, tol, min_size);
3300 let (periods, confidences, top_acf) = rank_cfd_results(&validated);
3301
3302 CfdAutoperiodResult {
3303 period: periods[0],
3304 confidence: confidences[0],
3305 acf_validation: top_acf,
3306 periods,
3307 confidences,
3308 }
3309}
3310
3311pub fn cfd_autoperiod_fdata(
3313 data: &FdMatrix,
3314 argvals: &[f64],
3315 cluster_tolerance: Option<f64>,
3316 min_cluster_size: Option<usize>,
3317) -> CfdAutoperiodResult {
3318 let (n, m) = data.shape();
3319 if n == 0 || m < 8 || argvals.len() != m {
3320 return CfdAutoperiodResult {
3321 period: f64::NAN,
3322 confidence: 0.0,
3323 acf_validation: 0.0,
3324 periods: Vec::new(),
3325 confidences: Vec::new(),
3326 };
3327 }
3328
3329 let mean_curve = compute_mean_curve(data);
3330 cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3331}
3332
/// Power-weighted centroid and total power of a finished cluster.
fn summarize_cluster(cluster: &[(f64, f64)]) -> (f64, f64) {
    let total_power: f64 = cluster.iter().map(|&(_, pw)| pw).sum();
    let center = cluster.iter().map(|&(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
    (center, total_power)
}

/// Group nearby candidate periods into clusters.
///
/// Candidates are sorted by period and grouped greedily: a candidate joins
/// the current cluster while its relative distance to the running cluster
/// mean is within `tolerance`. Each emitted cluster of at least `min_size`
/// members is reported as its power-weighted center plus total power.
///
/// Consistency fix: the cluster-finalization arithmetic was duplicated for
/// the trailing cluster; it is now factored into `summarize_cluster`.
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    if candidates.is_empty() {
        return Vec::new();
    }

    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in &sorted[1..] {
        // Unweighted running mean of the current cluster's periods.
        let mean_period = current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - mean_period).abs() / mean_period.max(period);

        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            if current.len() >= min_size {
                clusters.push(summarize_cluster(&current));
            }
            current = vec![(period, power)];
        }
    }

    // Flush the trailing cluster.
    if current.len() >= min_size {
        clusters.push(summarize_cluster(&current));
    }

    clusters
}
3385
/// Lomb–Scargle periodogram result for (possibly unevenly sampled) data.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Frequencies at which the periodogram was evaluated.
    pub frequencies: Vec<f64>,
    /// Periods (1/frequency; infinity at zero frequency), same order.
    pub periods: Vec<f64>,
    /// Variance-normalized Lomb–Scargle power at each frequency.
    pub power: Vec<f64>,
    /// Period of the highest-power frequency.
    pub peak_period: f64,
    /// Frequency with the highest power.
    pub peak_frequency: f64,
    /// Power at the peak frequency.
    pub peak_power: f64,
    /// Probability the peak arose from noise (smaller = more significant).
    pub false_alarm_probability: f64,
    /// 1 - false_alarm_probability.
    pub significance: f64,
}
3410
3411pub fn lomb_scargle(
3451 times: &[f64],
3452 values: &[f64],
3453 frequencies: Option<&[f64]>,
3454 oversampling: Option<f64>,
3455 nyquist_factor: Option<f64>,
3456) -> LombScargleResult {
3457 let n = times.len();
3458 assert_eq!(n, values.len(), "times and values must have same length");
3459 assert!(n >= 3, "Need at least 3 data points");
3460
3461 let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3463 let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3464
3465 let freq_vec: Vec<f64>;
3467 let freqs = if let Some(f) = frequencies {
3468 f
3469 } else {
3470 freq_vec = generate_ls_frequencies(
3471 times,
3472 oversampling.unwrap_or(4.0),
3473 nyquist_factor.unwrap_or(1.0),
3474 );
3475 &freq_vec
3476 };
3477
3478 let mut power = Vec::with_capacity(freqs.len());
3480
3481 for &freq in freqs.iter() {
3482 let omega = 2.0 * PI * freq;
3483 let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3484 power.push(p);
3485 }
3486
3487 let (peak_idx, &peak_power) = power
3489 .iter()
3490 .enumerate()
3491 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
3492 .unwrap_or((0, &0.0));
3493
3494 let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3495 let peak_period = if peak_frequency > 0.0 {
3496 1.0 / peak_frequency
3497 } else {
3498 f64::INFINITY
3499 };
3500
3501 let n_indep = estimate_independent_frequencies(times, freqs.len());
3503 let fap = lomb_scargle_fap(peak_power, n_indep, n);
3504
3505 let periods: Vec<f64> = freqs
3507 .iter()
3508 .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3509 .collect();
3510
3511 LombScargleResult {
3512 frequencies: freqs.to_vec(),
3513 periods,
3514 power,
3515 peak_period,
3516 peak_frequency,
3517 peak_power,
3518 false_alarm_probability: fap,
3519 significance: 1.0 - fap,
3520 }
3521}
3522
/// Lomb–Scargle power at a single angular frequency `omega`.
///
/// Uses the classic time-offset `tau` so the cosine and sine terms are
/// orthogonal, then normalizes by the series variance. Returns 0.0 for a
/// non-positive variance or frequency.
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    // Phase offset tau that makes the trig terms orthogonal at omega.
    let (sin2, cos2) = times.iter().fold((0.0f64, 0.0f64), |(s, c), &t| {
        let a = 2.0 * omega * t;
        (s + a.sin(), c + a.cos())
    });
    let tau = sin2.atan2(cos2) / (2.0 * omega);

    // Projections of the centered values onto cos/sin, plus self-products.
    let (mut sc, mut ss, mut css, mut sss) = (0.0, 0.0, 0.0, 0.0);
    for (&t, &y) in times.iter().zip(values.iter()) {
        let arg = omega * (t - tau);
        let (s, c) = arg.sin_cos();
        let dy = y - mean_y;
        sc += dy * c;
        ss += dy * s;
        css += c * c;
        sss += s * s;
    }

    // Guard the denominators against numerically vanishing sums.
    0.5 * (sc * sc / css.max(1e-15) + ss * ss / sss.max(1e-15)) / var_y
}
3574
/// Build a frequency grid for Lomb–Scargle evaluation.
///
/// The grid spans 1/T (T = observed time span) up to `nyquist_factor` times
/// the pseudo-Nyquist frequency, with spacing `f_min / oversampling`, capped
/// at 10000 frequencies. Returns `[0.0]` for fewer than two samples.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    let n = times.len();
    if n < 2 {
        return vec![0.0];
    }

    let t_min = times.iter().copied().fold(f64::INFINITY, f64::min);
    let t_max = times.iter().copied().fold(f64::NEG_INFINITY, f64::max);
    let span = (t_max - t_min).max(1e-10);

    let f_min = 1.0 / span;
    // Pseudo-Nyquist frequency for a span sampled at the mean rate.
    let f_nyquist = 0.5 * (n - 1) as f64 / span;
    let f_max = f_nyquist * nyquist_factor;
    let df = f_min / oversampling;

    // Cap the grid size so evaluation stays bounded.
    let count = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);
    (0..count).map(|i| f_min + i as f64 * df).collect()
}
3608
/// Effective number of independent frequencies for the false-alarm estimate:
/// bounded above by both the sample count and the grid size.
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    times.len().min(n_freq)
}
3618
/// False-alarm probability of the periodogram peak.
///
/// Under white noise the normalized power at one frequency is exponentially
/// distributed, so with `n_indep` independent frequencies
/// FAP = 1 - (1 - e^{-power})^{n_indep}, with the power-of-n computed in log
/// space for stability.
///
/// Bug fix: the previous implementation returned 0.0 (maximally significant)
/// whenever `n_indep * ln(p_single) < -700` — but that is exactly the regime
/// where the cumulative probability underflows and the FAP should saturate
/// at 1.0 (NOT significant), e.g. a weak peak tested against many
/// independent frequencies. `exp` underflows gracefully to 0.0, so the
/// special case is simply removed.
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // P(single-frequency power < observed) under the noise hypothesis.
    let prob_single = 1.0 - (-power).exp();

    if prob_single >= 1.0 {
        // Peak so strong the survival probability underflows: FAP -> 0.
        return 0.0;
    }
    if prob_single <= 0.0 {
        return 1.0;
    }

    // FAP = 1 - prob_single^n_indep; exponentiation done in log space.
    let log_cdf = n_indep as f64 * prob_single.ln();
    1.0 - log_cdf.exp()
}
3654
3655pub fn lomb_scargle_fdata(
3671 data: &FdMatrix,
3672 argvals: &[f64],
3673 oversampling: Option<f64>,
3674 nyquist_factor: Option<f64>,
3675) -> LombScargleResult {
3676 let mean_curve = compute_mean_curve(data);
3678
3679 lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
3681}
3682
/// Result of a matrix-profile computation (STOMP) with arc-based period
/// analysis.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// Distance from each subsequence to its nearest neighbor
    /// (sqrt(2m(1 - pearson)), i.e. z-normalized Euclidean distance).
    pub profile: Vec<f64>,
    /// Index of each subsequence's nearest neighbor.
    pub profile_index: Vec<usize>,
    /// Subsequence (window) length m that was used.
    pub subsequence_length: usize,
    /// Candidate periods (in samples) from arc-count peaks, strongest first.
    pub detected_periods: Vec<f64>,
    /// Histogram of |i - nearest_neighbor(i)| arc lengths.
    pub arc_counts: Vec<usize>,
    /// Strongest detected period in samples (0.0 when none found).
    pub primary_period: f64,
    /// Fraction of non-trivial arcs supporting the primary period, in [0, 1].
    pub confidence: f64,
}
3705
3706pub fn matrix_profile(
3742 values: &[f64],
3743 subsequence_length: Option<usize>,
3744 exclusion_zone: Option<f64>,
3745) -> MatrixProfileResult {
3746 let n = values.len();
3747
3748 let m = subsequence_length.unwrap_or_else(|| {
3750 let default_m = n / 4;
3751 default_m.max(4).min(n / 2)
3752 });
3753
3754 assert!(m >= 3, "Subsequence length must be at least 3");
3755 assert!(
3756 m <= n / 2,
3757 "Subsequence length must be at most half the series length"
3758 );
3759
3760 let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3761 let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3762
3763 let profile_len = n - m + 1;
3765
3766 let (means, stds) = compute_sliding_stats(values, m);
3768
3769 let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3771
3772 let (arc_counts, detected_periods, primary_period, confidence) =
3774 analyze_arcs(&profile_index, profile_len, m);
3775
3776 MatrixProfileResult {
3777 profile,
3778 profile_index,
3779 subsequence_length: m,
3780 detected_periods,
3781 arc_counts,
3782 primary_period,
3783 confidence,
3784 }
3785}
3786
/// Sliding mean and standard deviation of every length-`m` window.
///
/// Uses prefix sums of the values and squared values so each window's
/// statistics cost O(1). Standard deviations are floored at 1e-10 so callers
/// can divide by them safely.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let windows = n - m + 1;
    let m_f = m as f64;

    // Prefix sums: cumsum[i] holds the sum of values[..i].
    let mut cumsum = vec![0.0; n + 1];
    let mut cumsum_sq = vec![0.0; n + 1];
    for (i, &v) in values.iter().enumerate() {
        cumsum[i + 1] = cumsum[i] + v;
        cumsum_sq[i + 1] = cumsum_sq[i] + v * v;
    }

    let mut means = Vec::with_capacity(windows);
    let mut stds = Vec::with_capacity(windows);
    for start in 0..windows {
        let sum = cumsum[start + m] - cumsum[start];
        let sum_sq = cumsum_sq[start + m] - cumsum_sq[start];
        let mean = sum / m_f;
        let variance = (sum_sq / m_f) - mean * mean;
        let std = variance.max(0.0).sqrt();
        means.push(mean);
        stds.push(std.max(1e-10));
    }

    (means, stds)
}
3823
3824fn stomp_core(
3828 values: &[f64],
3829 m: usize,
3830 means: &[f64],
3831 stds: &[f64],
3832 exclusion_radius: usize,
3833) -> (Vec<f64>, Vec<usize>) {
3834 let n = values.len();
3835 let profile_len = n - m + 1;
3836
3837 let mut profile = vec![f64::INFINITY; profile_len];
3839 let mut profile_index = vec![0usize; profile_len];
3840
3841 let mut qt = vec![0.0; profile_len];
3844
3845 for j in 0..profile_len {
3847 let mut dot = 0.0;
3848 for k in 0..m {
3849 dot += values[k] * values[j + k];
3850 }
3851 qt[j] = dot;
3852 }
3853
3854 update_profile_row(
3856 0,
3857 &qt,
3858 means,
3859 stds,
3860 m,
3861 exclusion_radius,
3862 &mut profile,
3863 &mut profile_index,
3864 );
3865
3866 for i in 1..profile_len {
3868 let mut qt_new = vec![0.0; profile_len];
3871
3872 let mut dot = 0.0;
3874 for k in 0..m {
3875 dot += values[i + k] * values[k];
3876 }
3877 qt_new[0] = dot;
3878
3879 for j in 1..profile_len {
3881 qt_new[j] =
3882 qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3883 }
3884
3885 qt = qt_new;
3886
3887 update_profile_row(
3889 i,
3890 &qt,
3891 means,
3892 stds,
3893 m,
3894 exclusion_radius,
3895 &mut profile,
3896 &mut profile_index,
3897 );
3898 }
3899
3900 (profile, profile_index)
3901}
3902
/// Fold one row of STOMP dot products into the running profile.
///
/// Each dot product is converted to a distance sqrt(2m(1 - pearson)) via the
/// z-normalization identity, and both endpoints (i and j) of the pair are
/// updated. Pairs within `exclusion_radius` are skipped as trivial matches.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let m_f = m as f64;

    for (j, &dot) in qt.iter().enumerate() {
        // Skip self-matches and near neighbors inside the exclusion zone.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        // Pearson correlation of the two z-normalized subsequences.
        let denominator = m_f * stds[i] * stds[j];
        let pearson = if denominator > 0.0 {
            ((dot - m_f * means[i] * means[j]) / denominator).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        let dist = (2.0 * m_f * (1.0 - pearson)).max(0.0).sqrt();

        // The pair (i, j) bounds both subsequences' nearest-neighbor distance.
        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
3950
/// Derive candidate periods from nearest-neighbor arc lengths.
///
/// Builds a histogram of |i - profile_index[i]|, finds local maxima with at
/// least 3 arcs at lags >= m/2 (skipping trivially short matches), and
/// reports up to 5 peak lags as periods. Confidence is the fraction of
/// non-trivial arcs landing on the primary peak.
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    // Local maxima of the histogram above the minimum meaningful lag.
    let min_period = m / 2;
    let mut peaks: Vec<(usize, usize)> = Vec::new();
    for lag in min_period..arc_counts.len().saturating_sub(1) {
        let here = arc_counts[lag];
        let prev = arc_counts[lag.saturating_sub(1)];
        let next = arc_counts[(lag + 1).min(arc_counts.len() - 1)];
        if here > prev && here > next && here >= 3 {
            peaks.push((lag, here));
        }
    }
    peaks.sort_by(|a, b| b.1.cmp(&a.1));

    let detected_periods: Vec<f64> = peaks.iter().take(5).map(|&(lag, _)| lag as f64).collect();

    let (primary_period, confidence) = match peaks.first() {
        Some(&(lag, count)) => {
            let total: usize = arc_counts[min_period..].iter().sum();
            let conf = if total > 0 {
                count as f64 / total as f64
            } else {
                0.0
            };
            (lag as f64, conf.min(1.0))
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
4008
4009pub fn matrix_profile_fdata(
4023 data: &FdMatrix,
4024 subsequence_length: Option<usize>,
4025 exclusion_zone: Option<f64>,
4026) -> MatrixProfileResult {
4027 let mean_curve = compute_mean_curve(data);
4029
4030 matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
4032}
4033
4034pub fn matrix_profile_seasonality(
4046 values: &[f64],
4047 subsequence_length: Option<usize>,
4048 confidence_threshold: Option<f64>,
4049) -> (bool, f64, f64) {
4050 let result = matrix_profile(values, subsequence_length, None);
4051
4052 let threshold = confidence_threshold.unwrap_or(0.1);
4053 let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
4054
4055 (is_seasonal, result.primary_period, result.confidence)
4056}
4057
/// Result of singular spectrum analysis (SSA) decomposition.
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component (same length as the input).
    pub trend: Vec<f64>,
    /// Reconstructed seasonal/periodic component.
    pub seasonal: Vec<f64>,
    /// Residual: input minus trend minus seasonal.
    pub noise: Vec<f64>,
    /// Leading singular values of the trajectory matrix.
    pub singular_values: Vec<f64>,
    /// Fraction of total variance explained by each retained component.
    pub contributions: Vec<f64>,
    /// Embedding window length L.
    pub window_length: usize,
    /// Number of components retained.
    pub n_components: usize,
    /// Period (lag in samples) of the first periodic component, 0.0 if none.
    pub detected_period: f64,
    /// Relative strength of that component (sigma_i / sigma_0).
    pub confidence: f64,
}
4084
4085pub fn ssa(
4126 values: &[f64],
4127 window_length: Option<usize>,
4128 n_components: Option<usize>,
4129 trend_components: Option<&[usize]>,
4130 seasonal_components: Option<&[usize]>,
4131) -> SsaResult {
4132 let n = values.len();
4133
4134 let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));
4136
4137 if n < 4 || l < 2 || l > n / 2 {
4138 return SsaResult {
4139 trend: values.to_vec(),
4140 seasonal: vec![0.0; n],
4141 noise: vec![0.0; n],
4142 singular_values: vec![],
4143 contributions: vec![],
4144 window_length: l,
4145 n_components: 0,
4146 detected_period: 0.0,
4147 confidence: 0.0,
4148 };
4149 }
4150
4151 let k = n - l + 1;
4153
4154 let trajectory = embed_trajectory(values, l, k);
4156
4157 let (u, sigma, vt) = svd_decompose(&trajectory, l, k);
4159
4160 let max_components = sigma.len();
4162 let n_comp = n_components.unwrap_or(10).min(max_components);
4163
4164 let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
4166 let contributions: Vec<f64> = sigma
4167 .iter()
4168 .take(n_comp)
4169 .map(|&s| s * s / total_var.max(1e-15))
4170 .collect();
4171
4172 let (trend_idx, seasonal_idx, detected_period, confidence) =
4174 if trend_components.is_some() || seasonal_components.is_some() {
4175 let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
4177 let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
4178 (t_idx, s_idx, 0.0, 0.0)
4179 } else {
4180 auto_group_ssa_components(&u, &sigma, l, k, n_comp)
4182 };
4183
4184 let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
4186 let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);
4187
4188 let noise: Vec<f64> = values
4190 .iter()
4191 .zip(trend.iter())
4192 .zip(seasonal.iter())
4193 .map(|((&y, &t), &s)| y - t - s)
4194 .collect();
4195
4196 SsaResult {
4197 trend,
4198 seasonal,
4199 noise,
4200 singular_values: sigma.into_iter().take(n_comp).collect(),
4201 contributions,
4202 window_length: l,
4203 n_components: n_comp,
4204 detected_period,
4205 confidence,
4206 }
4207}
4208
/// Build the L×K trajectory (Hankel) matrix in column-major order.
///
/// Column j holds the lagged window `values[j..j + l]`, so entry (i, j) is
/// `values[i + j]`.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    let mut trajectory = Vec::with_capacity(l * k);
    for j in 0..k {
        trajectory.extend_from_slice(&values[j..j + l]);
    }
    trajectory
}
4222
4223fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
4225 use nalgebra::{DMatrix, SVD};
4226
4227 let mat = DMatrix::from_column_slice(l, k, trajectory);
4229
4230 let svd = SVD::new(mat, true, true);
4232
4233 let u_mat = match svd.u {
4236 Some(u) => u,
4237 None => return (vec![], vec![], vec![]),
4238 };
4239 let vt_mat = match svd.v_t {
4240 Some(vt) => vt,
4241 None => return (vec![], vec![], vec![]),
4242 };
4243 let sigma = svd.singular_values;
4244
4245 let u: Vec<f64> = u_mat.iter().cloned().collect();
4247 let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
4248 let vt: Vec<f64> = vt_mat.iter().cloned().collect();
4249
4250 (u, sigma_vec, vt)
4251}
4252
/// Classification of a single SSA eigenvector (left singular vector).
enum SsaComponentKind {
    /// Slowly varying component (very few sign changes).
    Trend,
    /// Oscillatory component; payload is the detected period in samples.
    Seasonal(f64),
    /// Neither trend-like nor periodic.
    Noise,
}
4258
4259fn classify_ssa_component(u_col: &[f64], trend_count: usize) -> SsaComponentKind {
4261 if is_trend_component(u_col) && trend_count < 2 {
4262 SsaComponentKind::Trend
4263 } else {
4264 let (is_periodic, period) = is_periodic_component(u_col);
4265 if is_periodic {
4266 SsaComponentKind::Seasonal(period)
4267 } else {
4268 SsaComponentKind::Noise
4269 }
4270 }
4271}
4272
/// Ensure the automatic grouping yields usable defaults.
///
/// Falls back to component 0 as trend and components 1–2 as seasonal when
/// the classifier found none.
///
/// Cleanup: the old code guarded the second seasonal push with `n_comp > 2`
/// inside an `n_comp >= 3` branch — a condition that is always true there;
/// the dead check is removed (behavior unchanged).
fn apply_ssa_grouping_defaults(
    trend_idx: &mut Vec<usize>,
    seasonal_idx: &mut Vec<usize>,
    n_comp: usize,
) {
    if trend_idx.is_empty() && n_comp > 0 {
        trend_idx.push(0);
    }
    if seasonal_idx.is_empty() && n_comp >= 3 {
        seasonal_idx.extend([1, 2]);
    }
}
4289
4290fn auto_group_ssa_components(
4292 u: &[f64],
4293 sigma: &[f64],
4294 l: usize,
4295 _k: usize,
4296 n_comp: usize,
4297) -> (Vec<usize>, Vec<usize>, f64, f64) {
4298 let mut trend_idx = Vec::new();
4299 let mut seasonal_idx = Vec::new();
4300 let mut detected_period = 0.0;
4301 let mut confidence = 0.0;
4302
4303 for i in 0..n_comp.min(sigma.len()) {
4304 let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4305 match classify_ssa_component(&u_col, trend_idx.len()) {
4306 SsaComponentKind::Trend => trend_idx.push(i),
4307 SsaComponentKind::Seasonal(period) => {
4308 seasonal_idx.push(i);
4309 if detected_period == 0.0 && period > 0.0 {
4310 detected_period = period;
4311 confidence = sigma[i] / sigma[0].max(1e-15);
4312 }
4313 }
4314 SsaComponentKind::Noise => {}
4315 }
4316 }
4317
4318 apply_ssa_grouping_defaults(&mut trend_idx, &mut seasonal_idx, n_comp);
4319 (trend_idx, seasonal_idx, detected_period, confidence)
4320}
4321
/// Heuristic trend test: an eigenvector with very few sign changes
/// (at most n/10 of them) is considered trend-like. Vectors shorter than 3
/// are never trend.
fn is_trend_component(u_col: &[f64]) -> bool {
    let n = u_col.len();
    if n < 3 {
        return false;
    }

    let sign_changes = u_col.windows(2).filter(|w| w[0] * w[1] < 0.0).count();
    sign_changes <= n / 10
}
4340
/// Heuristic periodicity test on an eigenvector.
///
/// Computes the lagged autocorrelation of the mean-centered vector
/// (normalized by total energy) for lags 2..n/2 and reports the lag with
/// the highest value above 0.3. Returns `(false, 0.0)` for short or
/// (numerically) constant input.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();

    let energy: f64 = centered.iter().map(|&x| x * x).sum();
    if energy < 1e-15 {
        return (false, 0.0);
    }

    // (best acf, best lag-as-period)
    let mut best = (0.0f64, 0.0f64);
    for lag in 2..n / 2 {
        let acf = centered[..n - lag]
            .iter()
            .zip(&centered[lag..])
            .map(|(&a, &b)| a * b)
            .sum::<f64>()
            / energy;

        if acf > best.0 && acf > 0.3 {
            best = (acf, lag as f64);
        }
    }

    (best.0 > 0.3 && best.1 > 0.0, best.1)
}
4377
4378fn reconstruct_grouped(
4380 u: &[f64],
4381 sigma: &[f64],
4382 vt: &[f64],
4383 l: usize,
4384 k: usize,
4385 n: usize,
4386 group_idx: &[usize],
4387) -> Vec<f64> {
4388 if group_idx.is_empty() {
4389 return vec![0.0; n];
4390 }
4391
4392 let mut grouped_matrix = vec![0.0; l * k];
4394
4395 for &idx in group_idx {
4396 if idx >= sigma.len() {
4397 continue;
4398 }
4399
4400 let s = sigma[idx];
4401
4402 for j in 0..k {
4404 for i in 0..l {
4405 let u_val = u[i + idx * l];
4406 let v_val = vt[idx + j * sigma.len().min(l)]; grouped_matrix[i + j * l] += s * u_val * v_val;
4408 }
4409 }
4410 }
4411
4412 diagonal_average(&grouped_matrix, l, k, n)
4414}
4415
/// Hankelize an L×K (column-major) matrix back into a length-`n` series.
///
/// Entry (i, j) contributes to series position i + j; each position becomes
/// the average of every matrix entry on its anti-diagonal.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut sums = vec![0.0; n];
    let mut counts = vec![0u32; n];

    for j in 0..k {
        for i in 0..l {
            let pos = i + j;
            if pos < n {
                sums[pos] += matrix[i + j * l];
                counts[pos] += 1;
            }
        }
    }

    for (sum, &count) in sums.iter_mut().zip(counts.iter()) {
        if count > 0 {
            *sum /= count as f64;
        }
    }

    sums
}
4441
4442pub fn ssa_fdata(
4454 data: &FdMatrix,
4455 window_length: Option<usize>,
4456 n_components: Option<usize>,
4457) -> SsaResult {
4458 let mean_curve = compute_mean_curve(data);
4460
4461 ssa(&mean_curve, window_length, n_components, None, None)
4463}
4464
4465pub fn ssa_seasonality(
4475 values: &[f64],
4476 window_length: Option<usize>,
4477 confidence_threshold: Option<f64>,
4478) -> (bool, f64, f64) {
4479 let result = ssa(values, window_length, None, None, None);
4480
4481 let threshold = confidence_threshold.unwrap_or(0.1);
4482 let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4483
4484 (is_seasonal, result.detected_period, result.confidence)
4485}
4486
4487#[cfg(test)]
4488mod tests {
4489 use super::*;
4490 use std::f64::consts::PI;
4491
4492 fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> FdMatrix {
4493 let mut data = vec![0.0; n * m];
4494 for i in 0..n {
4495 for j in 0..m {
4496 data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4497 }
4498 }
4499 FdMatrix::from_column_major(data, n, m).unwrap()
4500 }
4501
4502 #[test]
4503 fn test_period_estimation_fft() {
4504 let m = 200;
4505 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4506 let period = 2.0;
4507 let data = generate_sine(1, m, period, &argvals);
4508
4509 let estimate = estimate_period_fft(&data, &argvals);
4510 assert!((estimate.period - period).abs() < 0.2);
4511 assert!(estimate.confidence > 1.0);
4512 }
4513
4514 #[test]
4515 fn test_peak_detection() {
4516 let m = 100;
4517 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4518 let period = 2.0;
4519 let data = generate_sine(1, m, period, &argvals);
4520
4521 let result = detect_peaks(&data, &argvals, Some(1.5), None, false, None);
4522
4523 assert!(!result.peaks[0].is_empty());
4525 assert!((result.mean_period - period).abs() < 0.3);
4526 }
4527
4528 #[test]
4529 fn test_peak_detection_known_sine() {
4530 let m = 200; let period = 2.0;
4534 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4535 let data: Vec<f64> = argvals
4536 .iter()
4537 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4538 .collect();
4539 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4540
4541 let result = detect_peaks(&data, &argvals, None, None, false, None);
4542
4543 assert_eq!(
4545 result.peaks[0].len(),
4546 5,
4547 "Expected 5 peaks, got {}. Peak times: {:?}",
4548 result.peaks[0].len(),
4549 result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
4550 );
4551
4552 let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
4554 for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
4555 assert!(
4556 (peak.time - expected).abs() < 0.15,
4557 "Peak at {:.3} not close to expected {:.3}",
4558 peak.time,
4559 expected
4560 );
4561 }
4562
4563 assert!(
4565 (result.mean_period - period).abs() < 0.1,
4566 "Mean period {:.3} not close to expected {:.3}",
4567 result.mean_period,
4568 period
4569 );
4570 }
4571
4572 #[test]
4573 fn test_peak_detection_with_min_distance() {
4574 let m = 200;
4576 let period = 2.0;
4577 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4578 let data: Vec<f64> = argvals
4579 .iter()
4580 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4581 .collect();
4582 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4583
4584 let result = detect_peaks(&data, &argvals, Some(1.5), None, false, None);
4586 assert_eq!(
4587 result.peaks[0].len(),
4588 5,
4589 "With min_distance=1.5, expected 5 peaks, got {}",
4590 result.peaks[0].len()
4591 );
4592
4593 let result2 = detect_peaks(&data, &argvals, Some(2.5), None, false, None);
4595 assert!(
4596 result2.peaks[0].len() < 5,
4597 "With min_distance=2.5, expected fewer than 5 peaks, got {}",
4598 result2.peaks[0].len()
4599 );
4600 }
4601
4602 #[test]
4603 fn test_peak_detection_period_1() {
4604 let m = 400; let period = 1.0;
4608 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4609 let data: Vec<f64> = argvals
4610 .iter()
4611 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4612 .collect();
4613 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4614
4615 let result = detect_peaks(&data, &argvals, None, None, false, None);
4616
4617 assert_eq!(
4619 result.peaks[0].len(),
4620 10,
4621 "Expected 10 peaks, got {}",
4622 result.peaks[0].len()
4623 );
4624
4625 assert!(
4627 (result.mean_period - period).abs() < 0.1,
4628 "Mean period {:.3} not close to expected {:.3}",
4629 result.mean_period,
4630 period
4631 );
4632 }
4633
4634 #[test]
4635 fn test_peak_detection_shifted_sine() {
4636 let m = 200;
4639 let period = 2.0;
4640 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4641 let data: Vec<f64> = argvals
4642 .iter()
4643 .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
4644 .collect();
4645 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4646
4647 let result = detect_peaks(&data, &argvals, None, None, false, None);
4648
4649 assert_eq!(
4651 result.peaks[0].len(),
4652 5,
4653 "Expected 5 peaks for shifted sine, got {}",
4654 result.peaks[0].len()
4655 );
4656
4657 for peak in &result.peaks[0] {
4659 assert!(
4660 (peak.value - 2.0).abs() < 0.05,
4661 "Peak value {:.3} not close to expected 2.0",
4662 peak.value
4663 );
4664 }
4665 }
4666
4667 #[test]
4668 fn test_peak_detection_prominence() {
4669 let m = 200;
4672 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4673 let data: Vec<f64> = argvals
4674 .iter()
4675 .map(|&t| {
4676 let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
4677 let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
4679 base + ripple
4680 })
4681 .collect();
4682 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4683
4684 let result_no_filter = detect_peaks(&data, &argvals, None, None, false, None);
4686
4687 let result_filtered = detect_peaks(&data, &argvals, None, Some(0.5), false, None);
4689
4690 assert!(
4692 result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
4693 "Prominence filter should reduce peak count"
4694 );
4695 }
4696
4697 #[test]
4698 fn test_peak_detection_different_amplitudes() {
4699 let m = 200;
4701 let period = 2.0;
4702 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4703
4704 for amplitude in [0.5, 1.0, 2.0, 5.0] {
4705 let data: Vec<f64> = argvals
4706 .iter()
4707 .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
4708 .collect();
4709 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4710
4711 let result = detect_peaks(&data, &argvals, None, None, false, None);
4712
4713 assert_eq!(
4714 result.peaks[0].len(),
4715 5,
4716 "Amplitude {} should still find 5 peaks",
4717 amplitude
4718 );
4719
4720 for peak in &result.peaks[0] {
4722 assert!(
4723 (peak.value - amplitude).abs() < 0.1,
4724 "Peak value {:.3} should be close to amplitude {}",
4725 peak.value,
4726 amplitude
4727 );
4728 }
4729 }
4730 }
4731
4732 #[test]
4733 fn test_peak_detection_varying_frequency() {
4734 let m = 400;
4737 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4738
4739 let data: Vec<f64> = argvals
4742 .iter()
4743 .map(|&t| {
4744 let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4745 phase.sin()
4746 })
4747 .collect();
4748 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4749
4750 let result = detect_peaks(&data, &argvals, None, None, false, None);
4751
4752 assert!(
4754 result.peaks[0].len() >= 5,
4755 "Should find at least 5 peaks, got {}",
4756 result.peaks[0].len()
4757 );
4758
4759 let distances = &result.inter_peak_distances[0];
4761 if distances.len() >= 3 {
4762 let early_avg = (distances[0] + distances[1]) / 2.0;
4764 let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4765 assert!(
4766 late_avg < early_avg,
4767 "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4768 early_avg,
4769 late_avg
4770 );
4771 }
4772 }
4773
4774 #[test]
4775 fn test_peak_detection_sum_of_sines() {
4776 let m = 300;
4779 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4780
4781 let data: Vec<f64> = argvals
4782 .iter()
4783 .map(|&t| {
4784 (2.0 * std::f64::consts::PI * t / 2.0).sin()
4785 + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4786 })
4787 .collect();
4788 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4789
4790 let result = detect_peaks(&data, &argvals, Some(1.0), None, false, None);
4791
4792 assert!(
4794 result.peaks[0].len() >= 4,
4795 "Should find at least 4 peaks, got {}",
4796 result.peaks[0].len()
4797 );
4798
4799 let distances = &result.inter_peak_distances[0];
4801 if distances.len() >= 2 {
4802 let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4803 let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4804 assert!(
4805 max_dist > min_dist * 1.1,
4806 "Distances should vary: min={:.3}, max={:.3}",
4807 min_dist,
4808 max_dist
4809 );
4810 }
4811 }
4812
4813 #[test]
4814 fn test_seasonal_strength() {
4815 let m = 200;
4816 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4817 let period = 2.0;
4818 let data = generate_sine(1, m, period, &argvals);
4819
4820 let strength = seasonal_strength_variance(&data, &argvals, period, 3);
4821 assert!(strength > 0.8);
4823
4824 let strength_spectral = seasonal_strength_spectral(&data, &argvals, period);
4825 assert!(strength_spectral > 0.5);
4826 }
4827
4828 #[test]
4829 fn test_instantaneous_period() {
4830 let m = 200;
4831 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4832 let period = 2.0;
4833 let data = generate_sine(1, m, period, &argvals);
4834
4835 let result = instantaneous_period(&data, &argvals);
4836
4837 let mid_period = result.period[m / 2];
4839 assert!(
4840 (mid_period - period).abs() < 0.5,
4841 "Expected period ~{}, got {}",
4842 period,
4843 mid_period
4844 );
4845 }
4846
4847 #[test]
4848 fn test_peak_timing_analysis() {
4849 let m = 500;
4851 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4852 let period = 2.0;
4853 let data = generate_sine(1, m, period, &argvals);
4854
4855 let result = analyze_peak_timing(&data, &argvals, period, Some(11));
4856
4857 assert!(!result.peak_times.is_empty());
4859 assert!(result.mean_timing.is_finite());
4861 assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4863 }
4864
4865 #[test]
4866 fn test_seasonality_classification() {
4867 let m = 400;
4868 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4869 let period = 2.0;
4870 let data = generate_sine(1, m, period, &argvals);
4871
4872 let result = classify_seasonality(&data, &argvals, period, None, None);
4873
4874 assert!(result.is_seasonal);
4875 assert!(result.seasonal_strength > 0.5);
4876 assert!(matches!(
4877 result.classification,
4878 SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4879 ));
4880 }
4881
4882 #[test]
4883 fn test_otsu_threshold() {
4884 let values = vec![
4886 0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4887 ];
4888
4889 let threshold = otsu_threshold(&values);
4890
4891 assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4895 assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4896 }
4897
4898 #[test]
4899 fn test_gcv_fourier_nbasis_selection() {
4900 let m = 100;
4901 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4902
4903 let mut data = vec![0.0; m];
4905 for j in 0..m {
4906 data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4907 }
4908
4909 let data_mat = crate::matrix::FdMatrix::from_column_major(data, 1, m).unwrap();
4910 let nbasis = crate::basis::select_fourier_nbasis_gcv(&data_mat, &argvals, 5, 25);
4911
4912 assert!(nbasis >= 5);
4914 assert!(nbasis <= 25);
4915 }
4916
4917 #[test]
4918 fn test_detect_multiple_periods() {
4919 let m = 400;
4920 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); let period1 = 2.0;
4924 let period2 = 7.0;
4925 let mut data = vec![0.0; m];
4926 for j in 0..m {
4927 data[j] = (2.0 * PI * argvals[j] / period1).sin()
4928 + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
4929 }
4930
4931 let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);
4933
4934 assert!(
4936 detected.len() >= 2,
4937 "Expected at least 2 periods, found {}",
4938 detected.len()
4939 );
4940
4941 let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
4943 let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
4944 let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);
4945
4946 assert!(
4947 has_period1,
4948 "Expected to find period ~{}, got {:?}",
4949 period1, periods
4950 );
4951 assert!(
4952 has_period2,
4953 "Expected to find period ~{}, got {:?}",
4954 period2, periods
4955 );
4956
4957 assert!(
4959 detected[0].amplitude > detected[1].amplitude,
4960 "First detected should have higher amplitude"
4961 );
4962
4963 for d in &detected {
4965 assert!(
4966 d.strength > 0.0,
4967 "Detected period should have positive strength"
4968 );
4969 assert!(
4970 d.confidence > 0.0,
4971 "Detected period should have positive confidence"
4972 );
4973 assert!(
4974 d.amplitude > 0.0,
4975 "Detected period should have positive amplitude"
4976 );
4977 }
4978 }
4979
4980 #[test]
4985 fn test_amplitude_modulation_stable() {
4986 let m = 200;
4988 let period = 0.2;
4989 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4990
4991 let data: Vec<f64> = argvals
4993 .iter()
4994 .map(|&t| (2.0 * PI * t / period).sin())
4995 .collect();
4996 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4997
4998 let result = detect_amplitude_modulation(
4999 &data, &argvals, period, 0.15, 0.3, );
5002
5003 eprintln!(
5004 "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5005 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5006 );
5007
5008 assert!(result.is_seasonal, "Should detect seasonality");
5009 assert!(
5010 !result.has_modulation,
5011 "Constant amplitude should not have modulation, got score={:.4}",
5012 result.modulation_score
5013 );
5014 assert_eq!(
5015 result.modulation_type,
5016 ModulationType::Stable,
5017 "Should be classified as Stable"
5018 );
5019 }
5020
5021 #[test]
5022 fn test_amplitude_modulation_emerging() {
5023 let m = 200;
5025 let period = 0.2;
5026 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5027
5028 let data: Vec<f64> = argvals
5030 .iter()
5031 .map(|&t| {
5032 let amplitude = 0.2 + 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
5034 })
5035 .collect();
5036
5037 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5038
5039 let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5040
5041 eprintln!(
5042 "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5043 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5044 );
5045
5046 assert!(result.is_seasonal, "Should detect seasonality");
5047 assert!(
5048 result.has_modulation,
5049 "Growing amplitude should have modulation, score={:.4}",
5050 result.modulation_score
5051 );
5052 assert_eq!(
5053 result.modulation_type,
5054 ModulationType::Emerging,
5055 "Should be classified as Emerging, trend={:.4}",
5056 result.amplitude_trend
5057 );
5058 assert!(
5059 result.amplitude_trend > 0.0,
5060 "Trend should be positive for emerging"
5061 );
5062 }
5063
5064 #[test]
5065 fn test_amplitude_modulation_fading() {
5066 let m = 200;
5068 let period = 0.2;
5069 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5070
5071 let data: Vec<f64> = argvals
5073 .iter()
5074 .map(|&t| {
5075 let amplitude = 1.0 - 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
5077 })
5078 .collect();
5079
5080 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5081
5082 let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5083
5084 eprintln!(
5085 "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5086 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5087 );
5088
5089 assert!(result.is_seasonal, "Should detect seasonality");
5090 assert!(
5091 result.has_modulation,
5092 "Fading amplitude should have modulation"
5093 );
5094 assert_eq!(
5095 result.modulation_type,
5096 ModulationType::Fading,
5097 "Should be classified as Fading, trend={:.4}",
5098 result.amplitude_trend
5099 );
5100 assert!(
5101 result.amplitude_trend < 0.0,
5102 "Trend should be negative for fading"
5103 );
5104 }
5105
5106 #[test]
5107 fn test_amplitude_modulation_oscillating() {
5108 let m = 200;
5110 let period = 0.1;
5111 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5112
5113 let data: Vec<f64> = argvals
5115 .iter()
5116 .map(|&t| {
5117 let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); amplitude * (2.0 * PI * t / period).sin()
5119 })
5120 .collect();
5121
5122 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5123
5124 let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5125
5126 eprintln!(
5127 "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5128 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5129 );
5130
5131 assert!(result.is_seasonal, "Should detect seasonality");
5132 if result.has_modulation {
5134 assert!(
5136 result.amplitude_trend.abs() < 0.5,
5137 "Trend should be small for oscillating"
5138 );
5139 }
5140 }
5141
5142 #[test]
5143 fn test_amplitude_modulation_non_seasonal() {
5144 let m = 100;
5146 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5147
5148 let data: Vec<f64> = (0..m)
5150 .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5151 .collect();
5152
5153 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5154
5155 let result = detect_amplitude_modulation(
5156 &data, &argvals, 0.2, 0.15, 0.3,
5158 );
5159
5160 assert!(
5161 !result.is_seasonal,
5162 "Noise should not be detected as seasonal"
5163 );
5164 assert_eq!(
5165 result.modulation_type,
5166 ModulationType::NonSeasonal,
5167 "Should be classified as NonSeasonal"
5168 );
5169 }
5170
5171 #[test]
5176 fn test_wavelet_amplitude_modulation_stable() {
5177 let m = 200;
5178 let period = 0.2;
5179 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5180
5181 let data: Vec<f64> = argvals
5182 .iter()
5183 .map(|&t| (2.0 * PI * t / period).sin())
5184 .collect();
5185
5186 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5187
5188 let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.3);
5189
5190 eprintln!(
5191 "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5192 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5193 );
5194
5195 assert!(result.is_seasonal, "Should detect seasonality");
5196 assert!(
5197 !result.has_modulation,
5198 "Constant amplitude should not have modulation, got score={:.4}",
5199 result.modulation_score
5200 );
5201 }
5202
5203 #[test]
5204 fn test_wavelet_amplitude_modulation_emerging() {
5205 let m = 200;
5206 let period = 0.2;
5207 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5208
5209 let data: Vec<f64> = argvals
5211 .iter()
5212 .map(|&t| {
5213 let amplitude = 0.2 + 0.8 * t;
5214 amplitude * (2.0 * PI * t / period).sin()
5215 })
5216 .collect();
5217
5218 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5219
5220 let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.2);
5221
5222 eprintln!(
5223 "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5224 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5225 );
5226
5227 assert!(result.is_seasonal, "Should detect seasonality");
5228 assert!(
5229 result.has_modulation,
5230 "Growing amplitude should have modulation"
5231 );
5232 assert!(
5233 result.amplitude_trend > 0.0,
5234 "Trend should be positive for emerging"
5235 );
5236 }
5237
5238 #[test]
5239 fn test_wavelet_amplitude_modulation_fading() {
5240 let m = 200;
5241 let period = 0.2;
5242 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5243
5244 let data: Vec<f64> = argvals
5246 .iter()
5247 .map(|&t| {
5248 let amplitude = 1.0 - 0.8 * t;
5249 amplitude * (2.0 * PI * t / period).sin()
5250 })
5251 .collect();
5252
5253 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5254
5255 let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.2);
5256
5257 eprintln!(
5258 "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5259 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5260 );
5261
5262 assert!(result.is_seasonal, "Should detect seasonality");
5263 assert!(
5264 result.has_modulation,
5265 "Fading amplitude should have modulation"
5266 );
5267 assert!(
5268 result.amplitude_trend < 0.0,
5269 "Trend should be negative for fading"
5270 );
5271 }
5272
5273 #[test]
5274 fn test_seasonal_strength_wavelet() {
5275 let m = 200;
5276 let period = 0.2;
5277 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5278
5279 let seasonal_data: Vec<f64> = argvals
5281 .iter()
5282 .map(|&t| (2.0 * PI * t / period).sin())
5283 .collect();
5284
5285 let seasonal_data = FdMatrix::from_column_major(seasonal_data, 1, m).unwrap();
5286
5287 let strength = seasonal_strength_wavelet(&seasonal_data, &argvals, period);
5288 eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5289 assert!(
5290 strength > 0.5,
5291 "Pure sine should have high wavelet strength"
5292 );
5293
5294 let noise_data: Vec<f64> = (0..m)
5296 .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5297 .collect();
5298
5299 let noise_data = FdMatrix::from_column_major(noise_data, 1, m).unwrap();
5300
5301 let noise_strength = seasonal_strength_wavelet(&noise_data, &argvals, period);
5302 eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5303 assert!(
5304 noise_strength < 0.3,
5305 "Noise should have low wavelet strength"
5306 );
5307
5308 let wrong_period_strength =
5310 seasonal_strength_wavelet(&seasonal_data, &argvals, period * 2.0);
5311 eprintln!(
5312 "Wavelet strength (wrong period): {:.4}",
5313 wrong_period_strength
5314 );
5315 assert!(
5316 wrong_period_strength < strength,
5317 "Wrong period should have lower strength"
5318 );
5319 }
5320
5321 #[test]
5322 fn test_compute_mean_curve() {
5323 let data = FdMatrix::from_column_major(vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0], 2, 3).unwrap();
5328 let mean = compute_mean_curve(&data);
5329 assert_eq!(mean.len(), 3);
5330 assert!((mean[0] - 1.5).abs() < 1e-10);
5331 assert!((mean[1] - 3.0).abs() < 1e-10);
5332 assert!((mean[2] - 4.5).abs() < 1e-10);
5333 }
5334
5335 #[test]
5336 fn test_compute_mean_curve_parallel_consistency() {
5337 let n = 10;
5339 let m = 200;
5340 let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5341
5342 let data = FdMatrix::from_column_major(data, n, m).unwrap();
5343
5344 let seq_result = compute_mean_curve_impl(&data, false);
5345 let par_result = compute_mean_curve_impl(&data, true);
5346
5347 assert_eq!(seq_result.len(), par_result.len());
5348 for (s, p) in seq_result.iter().zip(par_result.iter()) {
5349 assert!(
5350 (s - p).abs() < 1e-10,
5351 "Sequential and parallel results differ"
5352 );
5353 }
5354 }
5355
5356 #[test]
5357 fn test_interior_bounds() {
5358 let bounds = interior_bounds(100);
5360 assert!(bounds.is_some());
5361 let (start, end) = bounds.unwrap();
5362 assert_eq!(start, 10);
5363 assert_eq!(end, 90);
5364
5365 let bounds = interior_bounds(10);
5367 assert!(bounds.is_some());
5368 let (start, end) = bounds.unwrap();
5369 assert!(start < end);
5370
5371 let bounds = interior_bounds(2);
5373 assert!(bounds.is_some() || bounds.is_none());
5375 }
5376
5377 #[test]
5378 fn test_hilbert_transform_pure_sine() {
5379 let m = 200;
5381 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5382 let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();
5383
5384 let analytic = hilbert_transform(&signal);
5385 assert_eq!(analytic.len(), m);
5386
5387 for c in analytic.iter().skip(10).take(m - 20) {
5389 let amp = c.norm();
5390 assert!(
5391 (amp - 1.0).abs() < 0.1,
5392 "Amplitude should be ~1, got {}",
5393 amp
5394 );
5395 }
5396 }
5397
5398 #[test]
5399 fn test_sazed_pure_sine() {
5400 let m = 200;
5402 let period = 2.0;
5403 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5404 let data: Vec<f64> = argvals
5405 .iter()
5406 .map(|&t| (2.0 * PI * t / period).sin())
5407 .collect();
5408
5409 let result = sazed(&data, &argvals, None);
5410
5411 assert!(result.period.is_finite(), "SAZED should detect a period");
5412 assert!(
5413 (result.period - period).abs() < 0.3,
5414 "Expected period ~{}, got {}",
5415 period,
5416 result.period
5417 );
5418 assert!(
5419 result.confidence > 0.4,
5420 "Expected confidence > 0.4, got {}",
5421 result.confidence
5422 );
5423 assert!(
5424 result.agreeing_components >= 2,
5425 "Expected at least 2 agreeing components, got {}",
5426 result.agreeing_components
5427 );
5428 }
5429
5430 #[test]
5431 fn test_sazed_noisy_sine() {
5432 let m = 300;
5434 let period = 3.0;
5435 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5436
5437 let data: Vec<f64> = argvals
5439 .iter()
5440 .enumerate()
5441 .map(|(i, &t)| {
5442 let signal = (2.0 * PI * t / period).sin();
5443 let noise = 0.1 * (17.3 * i as f64).sin();
5444 signal + noise
5445 })
5446 .collect();
5447
5448 let result = sazed(&data, &argvals, Some(0.15));
5449
5450 assert!(
5451 result.period.is_finite(),
5452 "SAZED should detect a period even with noise"
5453 );
5454 assert!(
5455 (result.period - period).abs() < 0.5,
5456 "Expected period ~{}, got {}",
5457 period,
5458 result.period
5459 );
5460 }
5461
5462 #[test]
5463 fn test_sazed_fdata() {
5464 let n = 5;
5466 let m = 200;
5467 let period = 2.0;
5468 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5469 let data = generate_sine(n, m, period, &argvals);
5470
5471 let result = sazed_fdata(&data, &argvals, None);
5472
5473 assert!(result.period.is_finite(), "SAZED should detect a period");
5474 assert!(
5475 (result.period - period).abs() < 0.3,
5476 "Expected period ~{}, got {}",
5477 period,
5478 result.period
5479 );
5480 }
5481
5482 #[test]
5483 fn test_sazed_short_series() {
5484 let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5486 let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5487
5488 let result = sazed(&data, &argvals, None);
5489
5490 assert!(
5492 result.period.is_nan() || result.period.is_finite(),
5493 "Should return NaN or valid period"
5494 );
5495 }
5496
5497 #[test]
5498 fn test_autoperiod_pure_sine() {
5499 let m = 200;
5501 let period = 2.0;
5502 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5503 let data: Vec<f64> = argvals
5504 .iter()
5505 .map(|&t| (2.0 * PI * t / period).sin())
5506 .collect();
5507
5508 let result = autoperiod(&data, &argvals, None, None);
5509
5510 assert!(
5511 result.period.is_finite(),
5512 "Autoperiod should detect a period"
5513 );
5514 assert!(
5515 (result.period - period).abs() < 0.3,
5516 "Expected period ~{}, got {}",
5517 period,
5518 result.period
5519 );
5520 assert!(
5521 result.confidence > 0.0,
5522 "Expected positive confidence, got {}",
5523 result.confidence
5524 );
5525 }
5526
5527 #[test]
5528 fn test_autoperiod_with_trend() {
5529 let m = 300;
5531 let period = 3.0;
5532 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5533 let data: Vec<f64> = argvals
5534 .iter()
5535 .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5536 .collect();
5537
5538 let result = autoperiod(&data, &argvals, None, None);
5539
5540 assert!(
5541 result.period.is_finite(),
5542 "Autoperiod should detect a period"
5543 );
5544 assert!(
5546 (result.period - period).abs() < 0.5,
5547 "Expected period ~{}, got {}",
5548 period,
5549 result.period
5550 );
5551 }
5552
5553 #[test]
5554 fn test_autoperiod_candidates() {
5555 let m = 200;
5557 let period = 2.0;
5558 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5559 let data: Vec<f64> = argvals
5560 .iter()
5561 .map(|&t| (2.0 * PI * t / period).sin())
5562 .collect();
5563
5564 let result = autoperiod(&data, &argvals, Some(5), Some(10));
5565
5566 assert!(
5567 !result.candidates.is_empty(),
5568 "Should have at least one candidate"
5569 );
5570
5571 let max_score = result
5573 .candidates
5574 .iter()
5575 .map(|c| c.combined_score)
5576 .fold(f64::NEG_INFINITY, f64::max);
5577 assert!(
5578 (result.confidence - max_score).abs() < 1e-10,
5579 "Returned confidence should match best candidate's score"
5580 );
5581 }
5582
5583 #[test]
5584 fn test_autoperiod_fdata() {
5585 let n = 5;
5587 let m = 200;
5588 let period = 2.0;
5589 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5590 let data = generate_sine(n, m, period, &argvals);
5591
5592 let result = autoperiod_fdata(&data, &argvals, None, None);
5593
5594 assert!(
5595 result.period.is_finite(),
5596 "Autoperiod should detect a period"
5597 );
5598 assert!(
5599 (result.period - period).abs() < 0.3,
5600 "Expected period ~{}, got {}",
5601 period,
5602 result.period
5603 );
5604 }
5605
5606 #[test]
5607 fn test_cfd_autoperiod_pure_sine() {
5608 let m = 200;
5610 let period = 2.0;
5611 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5612 let data: Vec<f64> = argvals
5613 .iter()
5614 .map(|&t| (2.0 * PI * t / period).sin())
5615 .collect();
5616
5617 let result = cfd_autoperiod(&data, &argvals, None, None);
5618
5619 assert!(
5620 result.period.is_finite(),
5621 "CFDAutoperiod should detect a period"
5622 );
5623 assert!(
5624 (result.period - period).abs() < 0.3,
5625 "Expected period ~{}, got {}",
5626 period,
5627 result.period
5628 );
5629 }
5630
5631 #[test]
5632 fn test_cfd_autoperiod_with_trend() {
5633 let m = 300;
5635 let period = 3.0;
5636 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5637 let data: Vec<f64> = argvals
5638 .iter()
5639 .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5640 .collect();
5641
5642 let result = cfd_autoperiod(&data, &argvals, None, None);
5643
5644 assert!(
5645 result.period.is_finite(),
5646 "CFDAutoperiod should detect a period despite trend"
5647 );
5648 assert!(
5650 (result.period - period).abs() < 0.6,
5651 "Expected period ~{}, got {}",
5652 period,
5653 result.period
5654 );
5655 }
5656
5657 #[test]
5658 fn test_cfd_autoperiod_multiple_periods() {
5659 let m = 400;
5661 let period1 = 2.0;
5662 let period2 = 5.0;
5663 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5664 let data: Vec<f64> = argvals
5665 .iter()
5666 .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5667 .collect();
5668
5669 let result = cfd_autoperiod(&data, &argvals, None, None);
5670
5671 assert!(
5672 !result.periods.is_empty(),
5673 "Should detect at least one period"
5674 );
5675 let close_to_p1 = (result.period - period1).abs() < 0.5;
5677 let close_to_p2 = (result.period - period2).abs() < 1.0;
5678 assert!(
5679 close_to_p1 || close_to_p2,
5680 "Primary period {} not close to {} or {}",
5681 result.period,
5682 period1,
5683 period2
5684 );
5685 }
5686
5687 #[test]
5688 fn test_cfd_autoperiod_fdata() {
5689 let n = 5;
5691 let m = 200;
5692 let period = 2.0;
5693 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5694 let data = generate_sine(n, m, period, &argvals);
5695
5696 let result = cfd_autoperiod_fdata(&data, &argvals, None, None);
5697
5698 assert!(
5699 result.period.is_finite(),
5700 "CFDAutoperiod should detect a period"
5701 );
5702 assert!(
5703 (result.period - period).abs() < 0.3,
5704 "Expected period ~{}, got {}",
5705 period,
5706 result.period
5707 );
5708 }
5709
5710 #[test]
5715 fn test_ssa_pure_sine() {
5716 let n = 200;
5717 let period = 12.0;
5718 let values: Vec<f64> = (0..n)
5719 .map(|i| {
5720 let t = i as f64;
5721 0.01 * t + (2.0 * PI * t / period).sin() + 0.05 * ((i * 7) as f64 * 0.1).sin()
5722 })
5723 .collect();
5724
5725 let result = ssa(&values, None, None, None, None);
5726
5727 for i in 0..n {
5729 let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5730 assert!(
5731 (reconstructed - values[i]).abs() < 1e-8,
5732 "SSA reconstruction error at {}: {} vs {}",
5733 i,
5734 reconstructed,
5735 values[i]
5736 );
5737 }
5738
5739 for i in 1..result.singular_values.len() {
5741 assert!(
5742 result.singular_values[i] <= result.singular_values[i - 1] + 1e-10,
5743 "Singular values should be descending: {} > {}",
5744 result.singular_values[i],
5745 result.singular_values[i - 1]
5746 );
5747 }
5748
5749 let total_contrib: f64 = result.contributions.iter().sum();
5751 assert!(
5752 total_contrib <= 1.0 + 1e-10,
5753 "Contributions should sum to <= 1, got {}",
5754 total_contrib
5755 );
5756 }
5757
5758 #[test]
5759 fn test_ssa_explicit_groupings() {
5760 let n = 100;
5761 let period = 10.0;
5762 let values: Vec<f64> = (0..n)
5763 .map(|i| 0.01 * i as f64 + (2.0 * PI * i as f64 / period).sin())
5764 .collect();
5765
5766 let trend_components = [0usize];
5767 let seasonal_components = [1usize, 2];
5768
5769 let result = ssa(
5770 &values,
5771 None,
5772 None,
5773 Some(&trend_components),
5774 Some(&seasonal_components),
5775 );
5776
5777 assert_eq!(result.trend.len(), n);
5778 assert_eq!(result.seasonal.len(), n);
5779 assert_eq!(result.noise.len(), n);
5780
5781 for i in 0..n {
5783 let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5784 assert!(
5785 (reconstructed - values[i]).abs() < 1e-8,
5786 "SSA explicit grouping reconstruction error at {}",
5787 i
5788 );
5789 }
5790 }
5791
5792 #[test]
5793 fn test_ssa_short_series() {
5794 let values = vec![1.0, 2.0, 3.0];
5796 let result = ssa(&values, None, None, None, None);
5797
5798 assert_eq!(result.trend, values);
5799 assert_eq!(result.seasonal, vec![0.0; 3]);
5800 assert_eq!(result.noise, vec![0.0; 3]);
5801 assert_eq!(result.n_components, 0);
5802 }
5803
5804 #[test]
5805 fn test_ssa_fdata() {
5806 let n = 5;
5807 let m = 100;
5808 let mut data = vec![0.0; n * m];
5809 for i in 0..n {
5810 let amp = (i + 1) as f64;
5811 for j in 0..m {
5812 data[i + j * n] = amp * (2.0 * PI * j as f64 / 12.0).sin() + 0.01 * j as f64;
5813 }
5814 }
5815
5816 let data = FdMatrix::from_column_major(data, n, m).unwrap();
5817
5818 let result = ssa_fdata(&data, None, None);
5819
5820 assert_eq!(result.trend.len(), m);
5821 assert_eq!(result.seasonal.len(), m);
5822 assert_eq!(result.noise.len(), m);
5823 assert!(!result.singular_values.is_empty());
5824 }
5825
5826 #[test]
5827 fn test_ssa_seasonality() {
5828 let n = 200;
5830 let period = 12.0;
5831 let seasonal_values: Vec<f64> = (0..n)
5832 .map(|i| (2.0 * PI * i as f64 / period).sin())
5833 .collect();
5834
5835 let (is_seasonal, _det_period, confidence) =
5836 ssa_seasonality(&seasonal_values, None, Some(0.05));
5837
5838 assert!(confidence >= 0.0, "Confidence should be non-negative");
5841
5842 let noise_values: Vec<f64> = (0..n)
5844 .map(|i| ((i * 13 + 7) as f64 * 0.1).sin() * 0.01)
5845 .collect();
5846
5847 let (is_noise_seasonal, _, noise_conf) = ssa_seasonality(&noise_values, None, Some(0.5));
5848
5849 let _ = (is_seasonal, is_noise_seasonal, noise_conf);
5852 }
5853
5854 #[test]
5859 fn test_matrix_profile_periodic() {
5860 let period = 20;
5861 let n = period * 10;
5862 let values: Vec<f64> = (0..n)
5863 .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5864 .collect();
5865
5866 let result = matrix_profile(&values, Some(15), None);
5867
5868 assert_eq!(result.profile.len(), n - 15 + 1);
5869 assert_eq!(result.profile_index.len(), n - 15 + 1);
5870 assert_eq!(result.subsequence_length, 15);
5871
5872 for &p in &result.profile {
5874 assert!(p.is_finite(), "Profile values should be finite");
5875 }
5876
5877 assert!(
5879 (result.primary_period - period as f64).abs() < 5.0,
5880 "Expected primary_period ≈ {}, got {}",
5881 period,
5882 result.primary_period
5883 );
5884 }
5885
5886 #[test]
5887 fn test_matrix_profile_non_periodic() {
5888 let n = 200;
5890 let values: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5891
5892 let result = matrix_profile(&values, Some(10), None);
5893
5894 assert_eq!(result.profile.len(), n - 10 + 1);
5895
5896 assert!(result.confidence <= 1.0, "Confidence should be <= 1.0");
5899 }
5900
5901 #[test]
5902 fn test_matrix_profile_fdata() {
5903 let n = 3;
5904 let m = 200;
5905 let period = 20.0;
5906 let mut data = vec![0.0; n * m];
5907 for i in 0..n {
5908 let amp = (i + 1) as f64;
5909 for j in 0..m {
5910 data[i + j * n] = amp * (2.0 * PI * j as f64 / period).sin();
5911 }
5912 }
5913
5914 let data = FdMatrix::from_column_major(data, n, m).unwrap();
5915
5916 let result = matrix_profile_fdata(&data, Some(15), None);
5917
5918 assert!(!result.profile.is_empty());
5919 assert!(result.profile_index.len() == result.profile.len());
5920 }
5921
5922 #[test]
5923 fn test_matrix_profile_seasonality() {
5924 let period = 20;
5925 let n = period * 10;
5926 let values: Vec<f64> = (0..n)
5928 .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5929 .collect();
5930
5931 let (is_seasonal, det_period, confidence) =
5932 matrix_profile_seasonality(&values, Some(15), Some(0.05));
5933
5934 assert!(
5935 is_seasonal,
5936 "Periodic signal should be detected as seasonal"
5937 );
5938 assert!(det_period > 0.0, "Detected period should be positive");
5939 assert!(confidence >= 0.05, "Confidence should be above threshold");
5940
5941 let weak_values: Vec<f64> = (0..n).map(|i| i as f64 * 0.001).collect();
5943 let (is_weak_seasonal, _, _) =
5944 matrix_profile_seasonality(&weak_values, Some(15), Some(0.5));
5945 let _ = is_weak_seasonal; }
5947
5948 #[test]
5953 fn test_lomb_scargle_regular() {
5954 let n = 200;
5955 let true_period = 5.0;
5956 let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5957 let values: Vec<f64> = times
5958 .iter()
5959 .map(|&t| (2.0 * PI * t / true_period).sin())
5960 .collect();
5961
5962 let result = lomb_scargle(×, &values, None, None, None);
5963
5964 assert!(
5965 (result.peak_period - true_period).abs() < 0.5,
5966 "Expected peak_period ≈ {}, got {}",
5967 true_period,
5968 result.peak_period
5969 );
5970 assert!(
5971 result.false_alarm_probability < 0.05,
5972 "FAP should be low for strong signal: {}",
5973 result.false_alarm_probability
5974 );
5975 assert!(result.peak_power > 0.0, "Peak power should be positive");
5976 assert!(!result.frequencies.is_empty());
5977 assert_eq!(result.frequencies.len(), result.power.len());
5978 assert_eq!(result.frequencies.len(), result.periods.len());
5979 }
5980
5981 #[test]
5982 fn test_lomb_scargle_irregular() {
5983 let true_period = 5.0;
5984 let all_times: Vec<f64> = (0..300).map(|i| i as f64 * 0.1).collect();
5986 let times: Vec<f64> = all_times
5988 .iter()
5989 .enumerate()
5990 .filter(|(i, _)| i % 2 == 0 || i % 3 == 0)
5991 .map(|(_, &t)| t)
5992 .collect();
5993 let values: Vec<f64> = times
5994 .iter()
5995 .map(|&t| (2.0 * PI * t / true_period).sin())
5996 .collect();
5997
5998 let result = lomb_scargle(×, &values, None, None, None);
5999
6000 assert!(
6001 (result.peak_period - true_period).abs() < 1.0,
6002 "Irregular LS: expected period ≈ {}, got {}",
6003 true_period,
6004 result.peak_period
6005 );
6006 }
6007
6008 #[test]
6009 fn test_lomb_scargle_custom_frequencies() {
6010 let n = 100;
6011 let true_period = 5.0;
6012 let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
6013 let values: Vec<f64> = times
6014 .iter()
6015 .map(|&t| (2.0 * PI * t / true_period).sin())
6016 .collect();
6017
6018 let frequencies: Vec<f64> = (1..50).map(|i| i as f64 * 0.01).collect();
6020 let result = lomb_scargle(×, &values, Some(&frequencies), None, None);
6021
6022 assert_eq!(result.frequencies.len(), frequencies.len());
6023 assert_eq!(result.power.len(), frequencies.len());
6024 assert!(result.peak_power > 0.0);
6025 }
6026
6027 #[test]
6028 fn test_lomb_scargle_fdata() {
6029 let n = 5;
6030 let m = 200;
6031 let period = 5.0;
6032 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6033 let data = generate_sine(n, m, period, &argvals);
6034
6035 let result = lomb_scargle_fdata(&data, &argvals, None, None);
6036
6037 assert!(
6038 (result.peak_period - period).abs() < 0.5,
6039 "Fdata LS: expected period ≈ {}, got {}",
6040 period,
6041 result.peak_period
6042 );
6043 assert!(!result.frequencies.is_empty());
6044 }
6045
6046 #[test]
6051 fn test_detect_seasonality_changes_onset() {
6052 let m = 400;
6053 let period = 2.0;
6054 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); let data: Vec<f64> = argvals
6058 .iter()
6059 .map(|&t| {
6060 if t < 10.0 {
6061 0.05 * ((t * 13.0).sin() + (t * 7.0).cos())
6063 } else {
6064 (2.0 * PI * t / period).sin()
6066 }
6067 })
6068 .collect();
6069 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
6070
6071 let result = detect_seasonality_changes(&data, &argvals, period, 0.3, 4.0, 2.0);
6072
6073 assert!(
6074 !result.strength_curve.is_empty(),
6075 "Strength curve should not be empty"
6076 );
6077 assert_eq!(result.strength_curve.len(), m);
6078
6079 if !result.change_points.is_empty() {
6081 let onset_points: Vec<_> = result
6082 .change_points
6083 .iter()
6084 .filter(|cp| cp.change_type == ChangeType::Onset)
6085 .collect();
6086 assert!(!onset_points.is_empty(), "Should detect Onset change point");
6088 }
6089 }
6090
6091 #[test]
6092 fn test_detect_seasonality_changes_no_change() {
6093 let m = 400;
6094 let period = 2.0;
6095 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
6096
6097 let data: Vec<f64> = argvals
6099 .iter()
6100 .map(|&t| (2.0 * PI * t / period).sin())
6101 .collect();
6102 let data = FdMatrix::from_column_major(data, 1, m).unwrap();
6103
6104 let result = detect_seasonality_changes(&data, &argvals, period, 0.3, 4.0, 2.0);
6105
6106 assert!(!result.strength_curve.is_empty());
6107 let cessation_points: Vec<_> = result
6109 .change_points
6110 .iter()
6111 .filter(|cp| cp.change_type == ChangeType::Cessation)
6112 .collect();
6113 assert!(
6114 cessation_points.is_empty(),
6115 "Consistently seasonal signal should have no Cessation points, found {}",
6116 cessation_points.len()
6117 );
6118 }
6119
6120 #[test]
6125 fn test_estimate_period_acf() {
6126 let m = 200;
6127 let period = 2.0;
6128 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6129 let data = generate_sine(1, m, period, &argvals);
6130
6131 let estimate = estimate_period_acf(data.as_slice(), 1, m, &argvals, m / 2);
6132
6133 assert!(
6134 estimate.period.is_finite(),
6135 "ACF period estimate should be finite"
6136 );
6137 assert!(
6138 (estimate.period - period).abs() < 0.5,
6139 "ACF expected period ≈ {}, got {}",
6140 period,
6141 estimate.period
6142 );
6143 assert!(
6144 estimate.confidence > 0.0,
6145 "ACF confidence should be positive"
6146 );
6147 }
6148
6149 #[test]
6150 fn test_estimate_period_regression() {
6151 let m = 200;
6152 let period = 2.0;
6153 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6154 let data = generate_sine(1, m, period, &argvals);
6155
6156 let estimate =
6157 estimate_period_regression(data.as_slice(), 1, m, &argvals, 1.5, 3.0, 100, 3);
6158
6159 assert!(
6160 estimate.period.is_finite(),
6161 "Regression period estimate should be finite"
6162 );
6163 assert!(
6164 (estimate.period - period).abs() < 0.5,
6165 "Regression expected period ≈ {}, got {}",
6166 period,
6167 estimate.period
6168 );
6169 assert!(
6170 estimate.confidence > 0.0,
6171 "Regression confidence should be positive"
6172 );
6173 }
6174
6175 #[test]
6180 fn test_seasonal_strength_windowed_variance() {
6181 let m = 200;
6182 let period = 2.0;
6183 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6184 let data = generate_sine(1, m, period, &argvals);
6185
6186 let strengths = seasonal_strength_windowed(
6187 &data,
6188 &argvals,
6189 period,
6190 4.0, StrengthMethod::Variance,
6192 );
6193
6194 assert_eq!(strengths.len(), m, "Should return m values");
6195
6196 let interior_start = m / 4;
6198 let interior_end = 3 * m / 4;
6199 for i in interior_start..interior_end {
6200 let s = strengths[i];
6201 if s.is_finite() {
6202 assert!(
6203 (-0.01..=1.01).contains(&s),
6204 "Windowed strength at {} should be near [0,1], got {}",
6205 i,
6206 s
6207 );
6208 }
6209 }
6210 }
6211}