Skip to main content

fdars_core/
seasonal.rs

1//! Seasonal time series analysis for functional data.
2//!
3//! This module provides functions for analyzing seasonal patterns in functional data:
4//! - Period estimation (FFT, autocorrelation, regression-based)
5//! - Peak detection with prominence calculation
6//! - Seasonal strength measurement (variance and spectral methods)
7//! - Seasonality change detection (onset/cessation)
8//! - Instantaneous period estimation for drifting seasonality
9//! - Peak timing variability analysis for short series
10//! - Seasonality classification
11
12use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::{iter_maybe_parallel, slice_maybe_parallel};
15use num_complex::Complex;
16#[cfg(feature = "parallel")]
17use rayon::iter::ParallelIterator;
18use rustfft::FftPlanner;
19use std::f64::consts::PI;
20
/// Result of period estimation.
///
/// Produced by `estimate_period_fft`, `estimate_period_acf` and
/// `estimate_period_regression`. On degenerate input, `period` and
/// `frequency` are NaN and `power`/`confidence` are 0.
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period (same time units as the evaluation points)
    pub period: f64,
    /// Dominant frequency (1/period)
    pub frequency: f64,
    /// Power at the dominant frequency
    pub power: f64,
    /// Confidence measure (ratio of peak power to mean power)
    pub confidence: f64,
}
33
/// A detected peak in functional data.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Time at which the peak occurs
    pub time: f64,
    /// Value at the peak
    pub value: f64,
    /// Prominence of the peak: its height above the larger of the two
    /// surrounding valley minima (height relative to surrounding valleys)
    pub prominence: f64,
}
44
/// Result of peak detection across all samples of a functional dataset.
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Peaks for each sample: `peaks[sample_idx]` contains peaks for that sample
    pub peaks: Vec<Vec<Peak>>,
    /// Inter-peak distances for each sample (time gaps between consecutive peaks)
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean period estimated from inter-peak distances across all samples
    pub mean_period: f64,
}
55
/// A detected period from multiple period detection.
///
/// One entry per iteration of the iterative residual-subtraction loop in
/// `detect_multiple_periods`.
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Estimated period
    pub period: f64,
    /// FFT confidence (ratio of peak power to mean power)
    pub confidence: f64,
    /// Seasonal strength at this period (variance explained)
    pub strength: f64,
    /// Amplitude of the sinusoidal component
    pub amplitude: f64,
    /// Phase of the sinusoidal component (radians)
    pub phase: f64,
    /// Iteration number (1-indexed; earlier iterations are more dominant)
    pub iteration: usize,
}
72
/// Method for computing seasonal strength.
///
/// Selects how the seasonal-to-total ratio is measured.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Variance decomposition: Var(seasonal) / Var(total)
    Variance,
    /// Spectral: power at seasonal frequencies / total power
    Spectral,
}
81
/// A detected change point in seasonality.
///
/// The before/after strengths quantify how strongly the change alters the
/// seasonal behavior around `time`.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time at which the change occurs
    pub time: f64,
    /// Type of change
    pub change_type: ChangeType,
    /// Seasonal strength before the change
    pub strength_before: f64,
    /// Seasonal strength after the change
    pub strength_after: f64,
}
94
/// Type of seasonality change (direction of a `ChangePoint`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Series becomes seasonal
    Onset,
    /// Series stops being seasonal
    Cessation,
}
103
/// Result of seasonality change detection.
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Detected change points (may be empty when seasonality never changes)
    pub change_points: Vec<ChangePoint>,
    /// Time-varying seasonal strength curve used for detection
    pub strength_curve: Vec<f64>,
}
112
/// Result of instantaneous period estimation.
///
/// All three vectors are evaluated on the same time grid; entry `i` of each
/// describes the signal at time point `i`.
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each time point
    pub period: Vec<f64>,
    /// Instantaneous frequency at each time point
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (envelope) at each time point
    pub amplitude: Vec<f64>,
}
123
/// Result of peak timing variability analysis.
///
/// Summarizes when within each cycle the seasonal peak occurs and how much
/// that timing varies from cycle to cycle.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Peak times for each cycle
    pub peak_times: Vec<f64>,
    /// Peak values
    pub peak_values: Vec<f64>,
    /// Within-period timing (0-1 scale, e.g., day-of-year / 365)
    pub normalized_timing: Vec<f64>,
    /// Mean normalized timing
    pub mean_timing: f64,
    /// Standard deviation of normalized timing
    pub std_timing: f64,
    /// Range of normalized timing (max - min)
    pub range_timing: f64,
    /// Variability score (0 = stable, 1 = highly variable)
    pub variability_score: f64,
    /// Trend in timing (positive = peaks getting later)
    pub timing_trend: f64,
    /// Cycle indices (1-indexed)
    pub cycle_indices: Vec<usize>,
}
146
/// Type of seasonality pattern assigned by seasonality classification.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Regular peaks with consistent timing
    StableSeasonal,
    /// Regular peaks but timing shifts between cycles
    VariableTiming,
    /// Some cycles seasonal, some not
    IntermittentSeasonal,
    /// No clear seasonality
    NonSeasonal,
}
159
/// Result of seasonality classification.
///
/// Combines overall and per-cycle strength measures with peak-timing
/// statistics into a single `SeasonalType` verdict.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Whether the series is seasonal overall
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles
    pub has_stable_timing: bool,
    /// Timing variability score (0 = stable, 1 = highly variable)
    pub timing_variability: f64,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Per-cycle seasonal strength
    pub cycle_strengths: Vec<f64>,
    /// Indices of weak/missing seasons (0-indexed into `cycle_strengths`)
    pub weak_seasons: Vec<usize>,
    /// Classification type
    pub classification: SeasonalType,
    /// Peak timing analysis (`None` when no peaks were detected)
    pub peak_timing: Option<PeakTimingResult>,
}
180
/// Method for automatic threshold selection.
///
/// `Otsu` uses the histogram-based bimodal separation implemented in
/// `otsu_threshold`.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Fixed user-specified threshold
    Fixed(f64),
    /// Percentile of strength distribution
    Percentile(f64),
    /// Otsu's method (optimal bimodal separation)
    Otsu,
}
191
/// Type of amplitude modulation pattern in the seasonal component.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Constant amplitude (no modulation)
    Stable,
    /// Amplitude increases over time (seasonality emerges)
    Emerging,
    /// Amplitude decreases over time (seasonality fades)
    Fading,
    /// Amplitude varies non-monotonically
    Oscillating,
    /// No seasonality detected
    NonSeasonal,
}
206
/// Result of amplitude modulation detection.
///
/// `strength_curve` and `time_points` are index-aligned; the scalar summary
/// fields (`min_strength`, `max_strength`, `modulation_score`,
/// `amplitude_trend`) are derived from that curve.
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether seasonality is present (using robust spectral method)
    pub is_seasonal: bool,
    /// Overall seasonal strength (spectral method)
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of time-varying strength (0 = stable, higher = more modulation)
    pub modulation_score: f64,
    /// Trend in amplitude (-1 to 1: negative = fading, positive = emerging)
    pub amplitude_trend: f64,
    /// Time-varying seasonal strength curve
    pub strength_curve: Vec<f64>,
    /// Time points corresponding to strength_curve
    pub time_points: Vec<f64>,
    /// Minimum strength in the curve
    pub min_strength: f64,
    /// Maximum strength in the curve
    pub max_strength: f64,
}
231
/// Result of wavelet-based amplitude modulation detection.
///
/// `wavelet_amplitude` and `time_points` are index-aligned samples of the
/// CWT amplitude at the single `scale` analyzed.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether seasonality is present
    pub is_seasonal: bool,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of wavelet amplitude
    pub modulation_score: f64,
    /// Trend in amplitude (-1 to 1)
    pub amplitude_trend: f64,
    /// Wavelet amplitude at the seasonal frequency over time
    pub wavelet_amplitude: Vec<f64>,
    /// Time points corresponding to wavelet_amplitude
    pub time_points: Vec<f64>,
    /// Scale (period) used for wavelet analysis
    pub scale: f64,
}
254
255// ============================================================================
256// Internal helper functions
257// ============================================================================
258
259/// Compute mean curve from column-major data matrix.
260///
261/// # Arguments
262/// * `data` - Column-major matrix (n x m)
263/// * `n` - Number of samples (rows)
264/// * `m` - Number of evaluation points (columns)
265/// * `parallel` - Use parallel iteration (default: true)
266///
267/// # Returns
268/// Mean curve of length m
269#[inline]
270fn compute_mean_curve_impl(data: &[f64], n: usize, m: usize, parallel: bool) -> Vec<f64> {
271    if parallel && m >= 100 {
272        // Use parallel iteration for larger datasets
273        iter_maybe_parallel!(0..m)
274            .map(|j| {
275                let mut sum = 0.0;
276                for i in 0..n {
277                    sum += data[i + j * n];
278                }
279                sum / n as f64
280            })
281            .collect()
282    } else {
283        // Sequential for small datasets or when disabled
284        (0..m)
285            .map(|j| {
286                let mut sum = 0.0;
287                for i in 0..n {
288                    sum += data[i + j * n];
289                }
290                sum / n as f64
291            })
292            .collect()
293    }
294}
295
/// Compute mean curve (parallel by default for m >= 100).
///
/// Convenience wrapper around `compute_mean_curve_impl` with `parallel = true`.
#[inline]
fn compute_mean_curve(data: &[f64], n: usize, m: usize) -> Vec<f64> {
    compute_mean_curve_impl(data, n, m, true)
}
301
/// Compute interior bounds for edge-skipping (10% on each side).
///
/// Used to avoid edge effects in wavelet and other analyses. The skip is
/// clamped so that at least the central half of the range always survives.
///
/// # Arguments
/// * `m` - Total number of points
///
/// # Returns
/// `(interior_start, interior_end)` indices, or `None` if range is invalid
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let edge_skip = (m as f64 * 0.1) as usize;
    // Never skip more than a quarter of the points on either side.
    let start = edge_skip.min(m / 4);
    let end = m.saturating_sub(edge_skip).max(m * 3 / 4);
    (end > start).then_some((start, end))
}
323
324/// Compute periodogram from data using FFT.
325/// Returns (frequencies, power) where frequencies are in cycles per unit time.
326fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
327    let n = data.len();
328    if n < 2 || argvals.len() != n {
329        return (Vec::new(), Vec::new());
330    }
331
332    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
333    let fs = 1.0 / dt; // Sampling frequency
334
335    let mut planner = FftPlanner::<f64>::new();
336    let fft = planner.plan_fft_forward(n);
337
338    let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
339    fft.process(&mut buffer);
340
341    // Compute power spectrum (one-sided)
342    let n_freq = n / 2 + 1;
343    let mut power = Vec::with_capacity(n_freq);
344    let mut frequencies = Vec::with_capacity(n_freq);
345
346    for k in 0..n_freq {
347        let freq = k as f64 * fs / n as f64;
348        frequencies.push(freq);
349
350        let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
351        // Double power for non-DC and non-Nyquist frequencies (one-sided spectrum)
352        let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
353        power.push(p);
354    }
355
356    (frequencies, power)
357}
358
/// Compute autocorrelation function up to max_lag.
///
/// Returns `acf[lag]` for `lag = 0..=max_lag.min(n - 1)`, normalized by the
/// (biased) sample variance so that `acf[0] == 1`. A (near-)constant input
/// yields an all-ones ACF of the same length as the regular path.
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    // Effective lag cap; also used by the degenerate branch below so both
    // paths return vectors of identical length (previously the constant-input
    // branch capped at `n` instead of `n - 1`).
    let max_lag = max_lag.min(n - 1);

    if var < 1e-15 {
        // Constant signal: perfectly self-correlated at every lag.
        return vec![1.0; max_lag + 1];
    }

    let mut acf = Vec::with_capacity(max_lag + 1);

    for lag in 0..=max_lag {
        let mut sum = 0.0;
        for i in 0..(n - lag) {
            sum += (data[i] - mean) * (data[i + lag] - mean);
        }
        acf.push(sum / (n as f64 * var));
    }

    acf
}
386
/// Find peaks in a 1D signal, returning indices.
///
/// A peak is a strict local maximum. When two peaks fall closer than
/// `min_distance`, only the taller one is kept.
fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
    let len = signal.len();
    if len < 3 {
        return Vec::new();
    }

    let mut found: Vec<usize> = Vec::new();

    for i in 1..len - 1 {
        let local_max = signal[i] > signal[i - 1] && signal[i] > signal[i + 1];
        if !local_max {
            continue;
        }
        match found.last().copied() {
            Some(prev) if i - prev < min_distance => {
                // Too close to the previous peak: keep whichever is taller.
                if signal[i] > signal[prev] {
                    *found.last_mut().unwrap() = i;
                }
            }
            _ => found.push(i),
        }
    }

    found
}
411
/// Compute prominence for a peak (height above surrounding valleys).
///
/// Walks outward from the peak in both directions, stopping at the boundary
/// or at the first value at least as high as the peak, and records the lowest
/// value seen on each side. The prominence is the peak height above the
/// higher of the two side minima.
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Lowest point to the left, before hitting a value >= the peak.
    let left_min = signal[..peak_idx]
        .iter()
        .rev()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |lo, &v| lo.min(v));

    // Lowest point to the right, same stopping rule.
    let right_min = signal[peak_idx + 1..]
        .iter()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |lo, &v| lo.min(v));

    peak_val - left_min.max(right_min)
}
436
437/// Hilbert transform using FFT to compute analytic signal.
438///
439/// # Arguments
440/// * `signal` - Input real signal
441///
442/// # Returns
443/// Analytic signal as complex vector (real part = original, imaginary = Hilbert transform)
444pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
445    let n = signal.len();
446    if n == 0 {
447        return Vec::new();
448    }
449
450    let mut planner = FftPlanner::<f64>::new();
451    let fft_forward = planner.plan_fft_forward(n);
452    let fft_inverse = planner.plan_fft_inverse(n);
453
454    // Forward FFT
455    let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
456    fft_forward.process(&mut buffer);
457
458    // Create analytic signal in frequency domain
459    // H[0] = 1, H[1..n/2] = 2, H[n/2] = 1 (if n even), H[n/2+1..] = 0
460    let half = n / 2;
461    for k in 1..half {
462        buffer[k] *= 2.0;
463    }
464    for k in (half + 1)..n {
465        buffer[k] = Complex::new(0.0, 0.0);
466    }
467
468    // Inverse FFT
469    fft_inverse.process(&mut buffer);
470
471    // Normalize
472    for c in buffer.iter_mut() {
473        *c /= n as f64;
474    }
475
476    buffer
477}
478
/// Unwrap phase to remove 2π discontinuities.
///
/// Whenever the raw step between consecutive samples exceeds π in magnitude,
/// a cumulative ±2π offset is applied to all subsequent samples so the
/// unwrapped phase evolves continuously.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    if phase.is_empty() {
        return Vec::new();
    }

    let mut out = Vec::with_capacity(phase.len());
    out.push(phase[0]);
    let mut offset = 0.0;

    for pair in phase.windows(2) {
        let step = pair[1] - pair[0];
        if step > PI {
            offset -= 2.0 * PI;
        } else if step < -PI {
            offset += 2.0 * PI;
        }
        out.push(pair[1] + offset);
    }

    out
}
503
504/// Morlet wavelet function.
505///
506/// The Morlet wavelet is a complex exponential modulated by a Gaussian:
507/// ψ(t) = exp(i * ω₀ * t) * exp(-t² / 2)
508///
509/// where ω₀ is the central frequency (typically 6 for good time-frequency trade-off).
510fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
511    let gaussian = (-t * t / 2.0).exp();
512    let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
513    oscillation * gaussian
514}
515
516/// Continuous Wavelet Transform at a single scale using Morlet wavelet.
517///
518/// Computes the wavelet coefficients at the specified scale (period) for all time points.
519/// Uses convolution in the time domain.
520///
521/// # Arguments
522/// * `signal` - Input signal
523/// * `argvals` - Time points
524/// * `scale` - Scale parameter (related to period)
525/// * `omega0` - Central frequency of Morlet wavelet (default: 6.0)
526///
527/// # Returns
528/// Complex wavelet coefficients at each time point
529#[allow(dead_code)]
530fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
531    let n = signal.len();
532    if n == 0 || scale <= 0.0 {
533        return Vec::new();
534    }
535
536    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
537
538    // Compute wavelet coefficients via convolution
539    // W(a, b) = (1/sqrt(a)) * Σ x[k] * ψ*((t[k] - b) / a) * dt
540    let norm = 1.0 / scale.sqrt();
541
542    (0..n)
543        .map(|b| {
544            let mut sum = Complex::new(0.0, 0.0);
545            for k in 0..n {
546                let t_normalized = (argvals[k] - argvals[b]) / scale;
547                // Only compute within reasonable range (Gaussian decays quickly)
548                if t_normalized.abs() < 6.0 {
549                    let wavelet = morlet_wavelet(t_normalized, omega0);
550                    sum += signal[k] * wavelet.conj();
551                }
552            }
553            sum * norm * dt
554        })
555        .collect()
556}
557
558/// Continuous Wavelet Transform at a single scale using FFT (faster for large signals).
559///
560/// Uses the convolution theorem: CWT = IFFT(FFT(signal) * FFT(wavelet)*)
561fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
562    let n = signal.len();
563    if n == 0 || scale <= 0.0 {
564        return Vec::new();
565    }
566
567    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
568    let norm = 1.0 / scale.sqrt();
569
570    // Compute wavelet in time domain centered at t=0
571    let wavelet_time: Vec<Complex<f64>> = (0..n)
572        .map(|k| {
573            // Center the wavelet
574            let t = if k <= n / 2 {
575                k as f64 * dt / scale
576            } else {
577                (k as f64 - n as f64) * dt / scale
578            };
579            morlet_wavelet(t, omega0) * norm
580        })
581        .collect();
582
583    let mut planner = FftPlanner::<f64>::new();
584    let fft_forward = planner.plan_fft_forward(n);
585    let fft_inverse = planner.plan_fft_inverse(n);
586
587    // FFT of signal
588    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
589    fft_forward.process(&mut signal_fft);
590
591    // FFT of wavelet
592    let mut wavelet_fft = wavelet_time;
593    fft_forward.process(&mut wavelet_fft);
594
595    // Multiply in frequency domain (use conjugate of wavelet FFT for correlation)
596    let mut result: Vec<Complex<f64>> = signal_fft
597        .iter()
598        .zip(wavelet_fft.iter())
599        .map(|(s, w)| *s * w.conj())
600        .collect();
601
602    // Inverse FFT
603    fft_inverse.process(&mut result);
604
605    // Normalize and scale by dt
606    for c in result.iter_mut() {
607        *c *= dt / n as f64;
608    }
609
610    result
611}
612
/// Compute Otsu's threshold for bimodal separation.
///
/// Finds the threshold that minimizes within-class variance (or equivalently
/// maximizes between-class variance) over a 256-bin histogram of the finite
/// input values. Degenerate inputs (empty, all NaN) fall back to 0.5; a
/// (near-)constant input returns its midpoint.
fn otsu_threshold(values: &[f64]) -> f64 {
    // Keep only finite values.
    let valid: Vec<f64> = values.iter().copied().filter(|v| v.is_finite()).collect();
    if valid.is_empty() {
        return 0.5;
    }

    let (min_val, max_val) = valid
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        });

    if (max_val - min_val).abs() < 1e-10 {
        return (min_val + max_val) / 2.0;
    }

    // Histogram the values into 256 bins.
    const N_BINS: usize = 256;
    let bin_width = (max_val - min_val) / N_BINS as f64;
    let mut histogram = [0usize; N_BINS];
    for &v in &valid {
        let bin = ((v - min_val) / bin_width).min(N_BINS as f64 - 1.0) as usize;
        histogram[bin] += 1;
    }

    let total = valid.len() as f64;
    let sum_total: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &count)| i as f64 * count as f64)
        .sum();

    // Sweep candidate thresholds, tracking the best between-class variance.
    let mut best_threshold = min_val;
    let mut best_variance = 0.0;
    let mut sum_b = 0.0;
    let mut weight_b = 0.0;

    for (t, &count) in histogram.iter().enumerate() {
        weight_b += count as f64;
        if weight_b == 0.0 {
            continue;
        }

        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            break;
        }

        sum_b += t as f64 * count as f64;

        let mean_b = sum_b / weight_b;
        let mean_f = (sum_total - sum_b) / weight_f;
        let between = weight_b * weight_f * (mean_b - mean_f).powi(2);

        if between > best_variance {
            best_variance = between;
            best_threshold = min_val + (t as f64 + 0.5) * bin_width;
        }
    }

    best_threshold
}
684
/// Compute linear regression slope (simple OLS).
///
/// Returns 0.0 for mismatched lengths, fewer than two points, or zero
/// variance in `x`.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    // Accumulate covariance numerator and x-variance denominator in one pass.
    let (num, den) = x.iter().zip(y.iter()).fold((0.0, 0.0), |(num, den), (&xi, &yi)| {
        let dx = xi - mean_x;
        (num + dx * (yi - mean_y), den + dx * dx)
    });

    if den.abs() < 1e-15 {
        0.0
    } else {
        num / den
    }
}
709
710// ============================================================================
711// Period Estimation
712// ============================================================================
713
714/// Estimate period using FFT periodogram.
715///
716/// Finds the dominant frequency in the periodogram (excluding DC) and
717/// returns the corresponding period.
718///
719/// # Arguments
720/// * `data` - Column-major matrix (n x m)
721/// * `n` - Number of samples
722/// * `m` - Number of evaluation points
723/// * `argvals` - Evaluation points (time values)
724///
725/// # Returns
726/// Period estimate with confidence measure
727pub fn estimate_period_fft(data: &[f64], n: usize, m: usize, argvals: &[f64]) -> PeriodEstimate {
728    if n == 0 || m < 4 || argvals.len() != m {
729        return PeriodEstimate {
730            period: f64::NAN,
731            frequency: f64::NAN,
732            power: 0.0,
733            confidence: 0.0,
734        };
735    }
736
737    // Compute mean curve first
738    let mean_curve = compute_mean_curve(data, n, m);
739
740    let (frequencies, power) = periodogram(&mean_curve, argvals);
741
742    if frequencies.len() < 2 {
743        return PeriodEstimate {
744            period: f64::NAN,
745            frequency: f64::NAN,
746            power: 0.0,
747            confidence: 0.0,
748        };
749    }
750
751    // Find peak in power spectrum (skip DC component at index 0)
752    let mut max_power = 0.0;
753    let mut max_idx = 1;
754    for (i, &p) in power.iter().enumerate().skip(1) {
755        if p > max_power {
756            max_power = p;
757            max_idx = i;
758        }
759    }
760
761    let dominant_freq = frequencies[max_idx];
762    let period = if dominant_freq > 1e-15 {
763        1.0 / dominant_freq
764    } else {
765        f64::INFINITY
766    };
767
768    // Confidence: ratio of peak power to mean power (excluding DC)
769    let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
770    let confidence = if mean_power > 1e-15 {
771        max_power / mean_power
772    } else {
773        0.0
774    };
775
776    PeriodEstimate {
777        period,
778        frequency: dominant_freq,
779        power: max_power,
780        confidence,
781    }
782}
783
784/// Estimate period using autocorrelation function.
785///
786/// Finds the first significant peak in the ACF after lag 0.
787///
788/// # Arguments
789/// * `data` - Column-major matrix (n x m)
790/// * `n` - Number of samples
791/// * `m` - Number of evaluation points
792/// * `argvals` - Evaluation points
793/// * `max_lag` - Maximum lag to consider (in number of points)
794pub fn estimate_period_acf(
795    data: &[f64],
796    n: usize,
797    m: usize,
798    argvals: &[f64],
799    max_lag: usize,
800) -> PeriodEstimate {
801    if n == 0 || m < 4 || argvals.len() != m {
802        return PeriodEstimate {
803            period: f64::NAN,
804            frequency: f64::NAN,
805            power: 0.0,
806            confidence: 0.0,
807        };
808    }
809
810    // Compute mean curve
811    let mean_curve = compute_mean_curve(data, n, m);
812
813    let acf = autocorrelation(&mean_curve, max_lag);
814
815    // Find first peak after lag 0 (skip first few lags to avoid finding lag 0)
816    let min_lag = 2;
817    let peaks = find_peaks_1d(&acf[min_lag..], 1);
818
819    if peaks.is_empty() {
820        return PeriodEstimate {
821            period: f64::NAN,
822            frequency: f64::NAN,
823            power: 0.0,
824            confidence: 0.0,
825        };
826    }
827
828    let peak_lag = peaks[0] + min_lag;
829    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
830    let period = peak_lag as f64 * dt;
831    let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };
832
833    PeriodEstimate {
834        period,
835        frequency,
836        power: acf[peak_lag],
837        confidence: acf[peak_lag].abs(),
838    }
839}
840
841/// Estimate period via Fourier regression grid search.
842///
843/// Tests multiple candidate periods and selects the one that minimizes
844/// the reconstruction error (similar to GCV).
845///
846/// # Arguments
847/// * `data` - Column-major matrix (n x m)
848/// * `n` - Number of samples
849/// * `m` - Number of evaluation points
850/// * `argvals` - Evaluation points
851/// * `period_min` - Minimum period to test
852/// * `period_max` - Maximum period to test
853/// * `n_candidates` - Number of candidate periods to test
854/// * `n_harmonics` - Number of Fourier harmonics to use
855pub fn estimate_period_regression(
856    data: &[f64],
857    n: usize,
858    m: usize,
859    argvals: &[f64],
860    period_min: f64,
861    period_max: f64,
862    n_candidates: usize,
863    n_harmonics: usize,
864) -> PeriodEstimate {
865    if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
866        return PeriodEstimate {
867            period: f64::NAN,
868            frequency: f64::NAN,
869            power: 0.0,
870            confidence: 0.0,
871        };
872    }
873
874    // Compute mean curve
875    let mean_curve = compute_mean_curve(data, n, m);
876
877    let nbasis = 1 + 2 * n_harmonics;
878
879    // Grid search over candidate periods
880    let candidates: Vec<f64> = (0..n_candidates)
881        .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
882        .collect();
883
884    let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
885        .map(|&period| {
886            let basis = fourier_basis_with_period(argvals, nbasis, period);
887
888            // Simple least squares fit
889            let mut rss = 0.0;
890            for j in 0..m {
891                let mut fitted = 0.0;
892                // Simple: use mean of basis function times data as rough fit
893                for k in 0..nbasis {
894                    let b_val = basis[j + k * m];
895                    let coef: f64 = (0..m)
896                        .map(|l| mean_curve[l] * basis[l + k * m])
897                        .sum::<f64>()
898                        / (0..m)
899                            .map(|l| basis[l + k * m].powi(2))
900                            .sum::<f64>()
901                            .max(1e-15);
902                    fitted += coef * b_val;
903                }
904                let resid = mean_curve[j] - fitted;
905                rss += resid * resid;
906            }
907
908            (period, rss)
909        })
910        .collect();
911
912    // Find period with minimum RSS
913    let (best_period, min_rss) = results
914        .iter()
915        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
916        .cloned()
917        .unwrap_or((f64::NAN, f64::INFINITY));
918
919    // Confidence based on how much better the best is vs average
920    let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
921    let confidence = if min_rss > 1e-15 {
922        (mean_rss / min_rss).min(10.0)
923    } else {
924        10.0
925    };
926
927    PeriodEstimate {
928        period: best_period,
929        frequency: if best_period > 1e-15 {
930            1.0 / best_period
931        } else {
932            0.0
933        },
934        power: 1.0 - min_rss / mean_rss,
935        confidence,
936    }
937}
938
939/// Detect multiple concurrent periodicities using iterative residual subtraction.
940///
941/// This function iteratively:
942/// 1. Estimates the dominant period using FFT
943/// 2. Checks both FFT confidence and seasonal strength as stopping criteria
944/// 3. Computes the amplitude and phase of the sinusoidal component
945/// 4. Subtracts the fitted sinusoid from the signal
946/// 5. Repeats on the residual until stopping criteria are met
947///
948/// # Arguments
949/// * `data` - Column-major matrix (n x m)
950/// * `n` - Number of samples
951/// * `m` - Number of evaluation points
952/// * `argvals` - Evaluation points
953/// * `max_periods` - Maximum number of periods to detect
954/// * `min_confidence` - Minimum FFT confidence to continue (default: 0.4)
955/// * `min_strength` - Minimum seasonal strength to continue (default: 0.15)
956///
957/// # Returns
958/// Vector of detected periods with their properties
959pub fn detect_multiple_periods(
960    data: &[f64],
961    n: usize,
962    m: usize,
963    argvals: &[f64],
964    max_periods: usize,
965    min_confidence: f64,
966    min_strength: f64,
967) -> Vec<DetectedPeriod> {
968    if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
969        return Vec::new();
970    }
971
972    // Compute mean curve
973    let mean_curve = compute_mean_curve(data, n, m);
974
975    let mut residual = mean_curve.clone();
976    let mut detected = Vec::with_capacity(max_periods);
977
978    for iteration in 1..=max_periods {
979        // Create single-sample data from residual
980        let residual_data: Vec<f64> = residual.clone();
981
982        // Estimate period
983        let est = estimate_period_fft(&residual_data, 1, m, argvals);
984
985        if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
986            break;
987        }
988
989        // Check seasonal strength at detected period
990        let strength = seasonal_strength_variance(&residual_data, 1, m, argvals, est.period, 3);
991
992        if strength < min_strength || strength.is_nan() {
993            break;
994        }
995
996        // Compute amplitude and phase using least squares fit
997        let omega = 2.0 * PI / est.period;
998        let mut cos_sum = 0.0;
999        let mut sin_sum = 0.0;
1000
1001        for (j, &t) in argvals.iter().enumerate() {
1002            cos_sum += residual[j] * (omega * t).cos();
1003            sin_sum += residual[j] * (omega * t).sin();
1004        }
1005
1006        let a = 2.0 * cos_sum / m as f64; // Cosine coefficient
1007        let b = 2.0 * sin_sum / m as f64; // Sine coefficient
1008        let amplitude = (a * a + b * b).sqrt();
1009        let phase = b.atan2(a);
1010
1011        detected.push(DetectedPeriod {
1012            period: est.period,
1013            confidence: est.confidence,
1014            strength,
1015            amplitude,
1016            phase,
1017            iteration,
1018        });
1019
1020        // Subtract fitted sinusoid from residual
1021        for (j, &t) in argvals.iter().enumerate() {
1022            let fitted = a * (omega * t).cos() + b * (omega * t).sin();
1023            residual[j] -= fitted;
1024        }
1025    }
1026
1027    detected
1028}
1029
1030// ============================================================================
1031// Peak Detection
1032// ============================================================================
1033
/// Detect peaks in functional data.
///
/// Uses derivative zero-crossings to find local maxima, with optional
/// Fourier basis smoothing and filtering by minimum distance and prominence.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m)
/// * `n` - Number of samples
/// * `m` - Number of evaluation points
/// * `argvals` - Evaluation points
/// * `min_distance` - Minimum time between peaks (None = no constraint)
/// * `min_prominence` - Minimum prominence (0-1 scale, None = no filter)
/// * `smooth_first` - Whether to smooth data before peak detection using Fourier basis
/// * `smooth_nbasis` - Number of Fourier basis functions. If None and smooth_first=true,
///   uses GCV to automatically select optimal nbasis (range 5-25).
///
/// # Returns
/// Per-sample peak lists, per-sample inter-peak distances, and the mean period
/// (mean of all inter-peak distances pooled across samples; NaN when no sample
/// has two or more peaks). An empty result is returned when `n == 0`, `m < 3`,
/// or `argvals.len() != m`.
///
/// # Notes
/// - `min_distance` is converted to grid points using the *average* spacing of
///   `argvals`, so the constraint is exact only for uniform grids.
/// - When two candidate peaks violate `min_distance`, the later one is dropped
///   regardless of height.
/// - Prominence is normalized by the range of the (possibly smoothed) data
///   pooled over *all* samples, making values comparable across samples.
pub fn detect_peaks(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    min_distance: Option<f64>,
    min_prominence: Option<f64>,
    smooth_first: bool,
    smooth_nbasis: Option<usize>,
) -> PeakDetectionResult {
    // Degenerate input: nothing to detect.
    if n == 0 || m < 3 || argvals.len() != m {
        return PeakDetectionResult {
            peaks: Vec::new(),
            inter_peak_distances: Vec::new(),
            mean_period: f64::NAN,
        };
    }

    // Average grid spacing; used to convert min_distance (time units) to a
    // count of grid points. min_dist_points defaults to 1 (no constraint).
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);

    // Optionally smooth the data using Fourier basis
    let work_data = if smooth_first {
        // Determine nbasis: use provided value or select via GCV
        let nbasis = smooth_nbasis
            .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, n, m, argvals, 5, 25));

        // Use Fourier basis smoothing; fall back to the raw data if the fit fails.
        if let Some(result) = crate::basis::fourier_fit_1d(data, n, m, argvals, nbasis) {
            result.fitted
        } else {
            data.to_vec()
        }
    } else {
        data.to_vec()
    };

    // First derivative of every sample; its sign changes mark local extrema.
    let deriv1 = deriv_1d(&work_data, n, m, argvals, 1);

    // Global data range (over all samples) for prominence normalization,
    // floored to avoid division by zero on constant data.
    let data_range: f64 = {
        let mut min_val = f64::INFINITY;
        let mut max_val = f64::NEG_INFINITY;
        for &v in work_data.iter() {
            min_val = min_val.min(v);
            max_val = max_val.max(v);
        }
        (max_val - min_val).max(1e-15)
    };

    // Find peaks for each sample (parallelized when the feature is enabled).
    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
        .map(|i| {
            // Extract curve and derivative for sample i (column-major layout).
            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();

            // Find zero crossings where derivative goes from positive to negative
            // (i.e., a local maximum between grid points j-1 and j).
            let mut peak_indices = Vec::new();
            for j in 1..m {
                if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
                    // Choose the grid index for the peak: the left point of the
                    // crossing, or the right point when the derivative step is
                    // numerically zero. NOTE(review): no sub-grid interpolation
                    // is actually performed here.
                    let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
                        j - 1
                    } else {
                        j
                    };

                    // Enforce minimum distance against the previously accepted
                    // peak; too-close candidates are dropped (not merged).
                    if peak_indices.is_empty()
                        || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
                    {
                        peak_indices.push(idx);
                    }
                }
            }

            // Build peaks with prominence normalized by the global data range.
            let mut peaks: Vec<Peak> = peak_indices
                .iter()
                .map(|&idx| {
                    let prominence = compute_prominence(&curve, idx) / data_range;
                    Peak {
                        time: argvals[idx],
                        value: curve[idx],
                        prominence,
                    }
                })
                .collect();

            // Filter by prominence (applied after the distance filter).
            if let Some(min_prom) = min_prominence {
                peaks.retain(|p| p.prominence >= min_prom);
            }

            // Inter-peak distances of the surviving peaks, in argvals units.
            let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();

            (peaks, distances)
        })
        .collect();

    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();

    // Mean period = mean of all inter-peak distances pooled across samples.
    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
    let mean_period = if all_distances.is_empty() {
        f64::NAN
    } else {
        all_distances.iter().sum::<f64>() / all_distances.len() as f64
    };

    PeakDetectionResult {
        peaks,
        inter_peak_distances,
        mean_period,
    }
}
1169
1170// ============================================================================
1171// Seasonal Strength
1172// ============================================================================
1173
1174/// Measure seasonal strength using variance decomposition.
1175///
1176/// Computes SS = Var(seasonal_component) / Var(total) where the seasonal
1177/// component is extracted using Fourier basis.
1178///
1179/// # Arguments
1180/// * `data` - Column-major matrix (n x m)
1181/// * `n` - Number of samples
1182/// * `m` - Number of evaluation points
1183/// * `argvals` - Evaluation points
1184/// * `period` - Seasonal period
1185/// * `n_harmonics` - Number of Fourier harmonics to use
1186///
1187/// # Returns
1188/// Seasonal strength in [0, 1]
1189pub fn seasonal_strength_variance(
1190    data: &[f64],
1191    n: usize,
1192    m: usize,
1193    argvals: &[f64],
1194    period: f64,
1195    n_harmonics: usize,
1196) -> f64 {
1197    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1198        return f64::NAN;
1199    }
1200
1201    // Compute mean curve
1202    let mean_curve = compute_mean_curve(data, n, m);
1203
1204    // Total variance
1205    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1206    let total_var: f64 = mean_curve
1207        .iter()
1208        .map(|&x| (x - global_mean).powi(2))
1209        .sum::<f64>()
1210        / m as f64;
1211
1212    if total_var < 1e-15 {
1213        return 0.0;
1214    }
1215
1216    // Fit Fourier basis to extract seasonal component
1217    let nbasis = 1 + 2 * n_harmonics;
1218    let basis = fourier_basis_with_period(argvals, nbasis, period);
1219
1220    // Project data onto basis (simple least squares for mean curve)
1221    let mut seasonal = vec![0.0; m];
1222    for k in 1..nbasis {
1223        // Skip DC component
1224        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
1225        if b_sum > 1e-15 {
1226            let coef: f64 = (0..m)
1227                .map(|j| mean_curve[j] * basis[j + k * m])
1228                .sum::<f64>()
1229                / b_sum;
1230            for j in 0..m {
1231                seasonal[j] += coef * basis[j + k * m];
1232            }
1233        }
1234    }
1235
1236    // Seasonal variance
1237    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
1238    let seasonal_var: f64 = seasonal
1239        .iter()
1240        .map(|&x| (x - seasonal_mean).powi(2))
1241        .sum::<f64>()
1242        / m as f64;
1243
1244    (seasonal_var / total_var).min(1.0)
1245}
1246
1247/// Measure seasonal strength using spectral method.
1248///
1249/// Computes SS = power at seasonal frequencies / total power.
1250///
1251/// # Arguments
1252/// * `data` - Column-major matrix (n x m)
1253/// * `n` - Number of samples
1254/// * `m` - Number of evaluation points
1255/// * `argvals` - Evaluation points
1256/// * `period` - Seasonal period
1257pub fn seasonal_strength_spectral(
1258    data: &[f64],
1259    n: usize,
1260    m: usize,
1261    argvals: &[f64],
1262    period: f64,
1263) -> f64 {
1264    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1265        return f64::NAN;
1266    }
1267
1268    // Compute mean curve
1269    let mean_curve = compute_mean_curve(data, n, m);
1270
1271    let (frequencies, power) = periodogram(&mean_curve, argvals);
1272
1273    if frequencies.len() < 2 {
1274        return f64::NAN;
1275    }
1276
1277    let fundamental_freq = 1.0 / period;
1278
1279    // Sum power at seasonal frequencies (fundamental and harmonics)
1280    let _freq_tol = fundamental_freq * 0.1; // 10% tolerance (for future use)
1281    let mut seasonal_power = 0.0;
1282    let mut total_power = 0.0;
1283
1284    for (i, (&freq, &p)) in frequencies.iter().zip(power.iter()).enumerate() {
1285        if i == 0 {
1286            continue;
1287        } // Skip DC
1288
1289        total_power += p;
1290
1291        // Check if frequency is near a harmonic of fundamental
1292        let ratio = freq / fundamental_freq;
1293        let nearest_harmonic = ratio.round();
1294        if (ratio - nearest_harmonic).abs() < 0.1 && nearest_harmonic >= 1.0 {
1295            seasonal_power += p;
1296        }
1297    }
1298
1299    if total_power < 1e-15 {
1300        return 0.0;
1301    }
1302
1303    (seasonal_power / total_power).min(1.0)
1304}
1305
1306/// Compute seasonal strength using Morlet wavelet power at the target period.
1307///
1308/// This method uses the Continuous Wavelet Transform (CWT) with a Morlet wavelet
1309/// to measure power at the specified seasonal period. Unlike spectral methods,
1310/// wavelets provide time-localized frequency information.
1311///
1312/// # Arguments
1313/// * `data` - Column-major matrix (n x m)
1314/// * `n` - Number of samples
1315/// * `m` - Number of evaluation points
1316/// * `argvals` - Evaluation points
1317/// * `period` - Seasonal period in argvals units
1318///
1319/// # Returns
1320/// Seasonal strength as ratio of wavelet power to total variance (0 to 1)
1321///
1322/// # Notes
1323/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
1324/// - Scale is computed as: scale = period * ω₀ / (2π)
1325/// - Strength is computed over the interior 80% of the signal to avoid edge effects
1326pub fn seasonal_strength_wavelet(
1327    data: &[f64],
1328    n: usize,
1329    m: usize,
1330    argvals: &[f64],
1331    period: f64,
1332) -> f64 {
1333    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1334        return f64::NAN;
1335    }
1336
1337    // Compute mean curve
1338    let mean_curve = compute_mean_curve(data, n, m);
1339
1340    // Remove DC component
1341    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1342    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1343
1344    // Compute total variance
1345    let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;
1346
1347    if total_variance < 1e-15 {
1348        return 0.0;
1349    }
1350
1351    // Compute wavelet transform at the seasonal scale
1352    let omega0 = 6.0;
1353    let scale = period * omega0 / (2.0 * PI);
1354    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1355
1356    if wavelet_coeffs.is_empty() {
1357        return f64::NAN;
1358    }
1359
1360    // Compute wavelet power, skipping edges (10% on each side)
1361    let (interior_start, interior_end) = match interior_bounds(m) {
1362        Some(bounds) => bounds,
1363        None => return f64::NAN,
1364    };
1365
1366    let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
1367        .iter()
1368        .map(|c| c.norm_sqr())
1369        .sum::<f64>()
1370        / (interior_end - interior_start) as f64;
1371
1372    // Return ratio of wavelet power to total variance
1373    // Normalize so that a pure sine at the target period gives ~1.0
1374    (wavelet_power / total_variance).sqrt().min(1.0)
1375}
1376
1377/// Compute time-varying seasonal strength using sliding windows.
1378///
1379/// # Arguments
1380/// * `data` - Column-major matrix (n x m)
1381/// * `n` - Number of samples
1382/// * `m` - Number of evaluation points
1383/// * `argvals` - Evaluation points
1384/// * `period` - Seasonal period
1385/// * `window_size` - Window width (recommended: 2 * period)
1386/// * `method` - Method for computing strength (Variance or Spectral)
1387///
1388/// # Returns
1389/// Seasonal strength at each time point
1390pub fn seasonal_strength_windowed(
1391    data: &[f64],
1392    n: usize,
1393    m: usize,
1394    argvals: &[f64],
1395    period: f64,
1396    window_size: f64,
1397    method: StrengthMethod,
1398) -> Vec<f64> {
1399    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
1400        return Vec::new();
1401    }
1402
1403    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1404    let half_window_points = ((window_size / 2.0) / dt).round() as usize;
1405
1406    // Compute mean curve
1407    let mean_curve = compute_mean_curve(data, n, m);
1408
1409    iter_maybe_parallel!(0..m)
1410        .map(|center| {
1411            let start = center.saturating_sub(half_window_points);
1412            let end = (center + half_window_points + 1).min(m);
1413            let window_m = end - start;
1414
1415            if window_m < 4 {
1416                return f64::NAN;
1417            }
1418
1419            let window_data: Vec<f64> = mean_curve[start..end].to_vec();
1420            let window_argvals: Vec<f64> = argvals[start..end].to_vec();
1421
1422            // Create single-sample data for the strength functions
1423            let single_data = window_data.clone();
1424
1425            match method {
1426                StrengthMethod::Variance => seasonal_strength_variance(
1427                    &single_data,
1428                    1,
1429                    window_m,
1430                    &window_argvals,
1431                    period,
1432                    3,
1433                ),
1434                StrengthMethod::Spectral => {
1435                    seasonal_strength_spectral(&single_data, 1, window_m, &window_argvals, period)
1436                }
1437            }
1438        })
1439        .collect()
1440}
1441
1442// ============================================================================
1443// Seasonality Change Detection
1444// ============================================================================
1445
1446/// Detect changes in seasonality.
1447///
1448/// Monitors time-varying seasonal strength and detects threshold crossings
1449/// that indicate onset or cessation of seasonality.
1450///
1451/// # Arguments
1452/// * `data` - Column-major matrix (n x m)
1453/// * `n` - Number of samples
1454/// * `m` - Number of evaluation points
1455/// * `argvals` - Evaluation points
1456/// * `period` - Seasonal period
1457/// * `threshold` - SS threshold for seasonal/non-seasonal (e.g., 0.3)
1458/// * `window_size` - Window size for local strength estimation
1459/// * `min_duration` - Minimum duration to confirm a change
1460pub fn detect_seasonality_changes(
1461    data: &[f64],
1462    n: usize,
1463    m: usize,
1464    argvals: &[f64],
1465    period: f64,
1466    threshold: f64,
1467    window_size: f64,
1468    min_duration: f64,
1469) -> ChangeDetectionResult {
1470    if n == 0 || m < 4 || argvals.len() != m {
1471        return ChangeDetectionResult {
1472            change_points: Vec::new(),
1473            strength_curve: Vec::new(),
1474        };
1475    }
1476
1477    // Compute time-varying seasonal strength
1478    let strength_curve = seasonal_strength_windowed(
1479        data,
1480        n,
1481        m,
1482        argvals,
1483        period,
1484        window_size,
1485        StrengthMethod::Variance,
1486    );
1487
1488    if strength_curve.is_empty() {
1489        return ChangeDetectionResult {
1490            change_points: Vec::new(),
1491            strength_curve: Vec::new(),
1492        };
1493    }
1494
1495    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1496    let min_dur_points = (min_duration / dt).round() as usize;
1497
1498    // Detect threshold crossings
1499    let mut change_points = Vec::new();
1500    let mut in_seasonal = strength_curve[0] > threshold;
1501    let mut last_change_idx: Option<usize> = None;
1502
1503    for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
1504        if ss.is_nan() {
1505            continue;
1506        }
1507
1508        let now_seasonal = ss > threshold;
1509
1510        if now_seasonal != in_seasonal {
1511            // Potential change point
1512            if let Some(last_idx) = last_change_idx {
1513                if i - last_idx < min_dur_points {
1514                    continue; // Too soon after last change
1515                }
1516            }
1517
1518            let change_type = if now_seasonal {
1519                ChangeType::Onset
1520            } else {
1521                ChangeType::Cessation
1522            };
1523
1524            let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
1525                strength_curve[i - 1]
1526            } else {
1527                ss
1528            };
1529
1530            change_points.push(ChangePoint {
1531                time: argvals[i],
1532                change_type,
1533                strength_before,
1534                strength_after: ss,
1535            });
1536
1537            in_seasonal = now_seasonal;
1538            last_change_idx = Some(i);
1539        }
1540    }
1541
1542    ChangeDetectionResult {
1543        change_points,
1544        strength_curve,
1545    }
1546}
1547
1548// ============================================================================
1549// Amplitude Modulation Detection
1550// ============================================================================
1551
/// Detect amplitude modulation in seasonal time series.
///
/// This function first checks if seasonality exists using the spectral method
/// (which is robust to amplitude modulation), then uses Hilbert transform to
/// extract the amplitude envelope and analyze modulation patterns.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m)
/// * `n` - Number of samples
/// * `m` - Number of evaluation points
/// * `argvals` - Evaluation points
/// * `period` - Seasonal period in argvals units
/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
///
/// # Returns
/// `AmplitudeModulationResult` containing detection results and diagnostics.
/// Note that `strength_curve` holds the *raw* (unsmoothed) Hilbert envelope,
/// while the modulation statistics are computed from a smoothed version over
/// the interior 80% of the signal.
///
/// # Example
/// ```ignore
/// let result = detect_amplitude_modulation(
///     &data, n, m, &argvals,
///     period,
///     0.15,          // CV > 0.15 indicates modulation
///     0.3,           // strength > 0.3 indicates seasonality
/// );
/// if result.has_modulation {
///     match result.modulation_type {
///         ModulationType::Emerging => println!("Seasonality is emerging"),
///         ModulationType::Fading => println!("Seasonality is fading"),
///         _ => {}
///     }
/// }
/// ```
pub fn detect_amplitude_modulation(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> AmplitudeModulationResult {
    // Default result for invalid input
    let empty_result = AmplitudeModulationResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        strength_curve: Vec::new(),
        time_points: Vec::new(),
        min_strength: 0.0,
        max_strength: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Step 1: Check if seasonality exists using spectral method (robust to AM)
    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);

    if overall_strength < seasonality_threshold {
        // Not seasonal: report the measured strength, no modulation analysis.
        return AmplitudeModulationResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Step 2: Compute mean curve
    let mean_curve = compute_mean_curve(data, n, m);

    // Step 3: Use Hilbert transform to get amplitude envelope
    // (magnitude of the analytic signal of the DC-removed mean curve).
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
    let analytic = hilbert_transform(&detrended);
    let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();

    if envelope.is_empty() {
        // Hilbert transform produced nothing usable: seasonal, but no
        // modulation information can be extracted.
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Step 4: Smooth the envelope to reduce high-frequency noise
    // Use simple moving average with window size based on period
    // (window spans roughly one period; assumes ~uniform grid spacing).
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let smooth_window = ((period / dt) as usize).max(3);
    let half_window = smooth_window / 2;

    let smoothed_envelope: Vec<f64> = (0..m)
        .map(|i| {
            let start = i.saturating_sub(half_window);
            let end = (i + half_window + 1).min(m);
            let sum: f64 = envelope[start..end].iter().sum();
            sum / (end - start) as f64
        })
        .collect();

    // Step 5: Compute statistics on smoothed envelope
    // Skip edge regions (first and last 10% of points), where the Hilbert
    // envelope is unreliable. If the interior is too short (< 5 points),
    // report the seasonal result without a modulation classification.
    let (interior_start, interior_end) = match interior_bounds(m) {
        Some((s, e)) if e > s + 4 => (s, e),
        _ => {
            return AmplitudeModulationResult {
                is_seasonal: true,
                seasonal_strength: overall_strength,
                has_modulation: false,
                modulation_type: ModulationType::Stable,
                modulation_score: 0.0,
                amplitude_trend: 0.0,
                strength_curve: envelope,
                time_points: argvals.to_vec(),
                min_strength: 0.0,
                max_strength: 0.0,
            };
        }
    };

    let interior_envelope = &smoothed_envelope[interior_start..interior_end];
    let interior_times = &argvals[interior_start..interior_end];
    let n_interior = interior_envelope.len() as f64;

    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    // Coefficient of variation (modulation score): std/mean of the smoothed
    // interior envelope; 0 when the mean amplitude is numerically zero.
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Step 6: Compute trend in amplitude (linear regression slope)
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Normalize slope to [-1, 1] based on amplitude range and time span
    // (fraction of the mean amplitude gained/lost over the whole interior).
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amp_range = max_amp - min_amp;
    let amplitude_trend = if amp_range > 1e-10 && time_span > 1e-10 && mean_amp > 1e-10 {
        // Normalized: what fraction of mean amplitude changes per unit time span
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    // Step 7: Classify modulation type
    // CV above threshold => modulated; the sign/magnitude of the normalized
    // trend (cutoff |0.3|) separates emerging, fading, and oscillating.
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeModulationResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation,
        modulation_type,
        modulation_score,
        amplitude_trend,
        // NOTE(review): the raw envelope is returned here, not the smoothed
        // one the statistics were computed from — confirm this is intended.
        strength_curve: envelope,
        time_points: argvals.to_vec(),
        min_strength: min_amp,
        max_strength: max_amp,
    }
}
1762
1763/// Detect amplitude modulation using Morlet wavelet transform.
1764///
1765/// Uses continuous wavelet transform at the seasonal period to extract
1766/// time-varying amplitude. This method is more robust to noise and can
1767/// better handle non-stationary signals compared to Hilbert transform.
1768///
1769/// # Arguments
1770/// * `data` - Column-major matrix (n x m)
1771/// * `n` - Number of samples
1772/// * `m` - Number of evaluation points
1773/// * `argvals` - Evaluation points
1774/// * `period` - Seasonal period in argvals units
1775/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
1776/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
1777///
1778/// # Returns
1779/// `WaveletAmplitudeResult` containing detection results and wavelet amplitude curve
1780///
1781/// # Notes
1782/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
1783/// - The scale parameter is derived from the period: scale = period * ω₀ / (2π)
1784/// - This relates to how wavelets measure period: for Morlet, period ≈ scale * 2π / ω₀
1785pub fn detect_amplitude_modulation_wavelet(
1786    data: &[f64],
1787    n: usize,
1788    m: usize,
1789    argvals: &[f64],
1790    period: f64,
1791    modulation_threshold: f64,
1792    seasonality_threshold: f64,
1793) -> WaveletAmplitudeResult {
1794    let empty_result = WaveletAmplitudeResult {
1795        is_seasonal: false,
1796        seasonal_strength: 0.0,
1797        has_modulation: false,
1798        modulation_type: ModulationType::NonSeasonal,
1799        modulation_score: 0.0,
1800        amplitude_trend: 0.0,
1801        wavelet_amplitude: Vec::new(),
1802        time_points: Vec::new(),
1803        scale: 0.0,
1804    };
1805
1806    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1807        return empty_result;
1808    }
1809
1810    // Step 1: Check if seasonality exists using spectral method
1811    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);
1812
1813    if overall_strength < seasonality_threshold {
1814        return WaveletAmplitudeResult {
1815            is_seasonal: false,
1816            seasonal_strength: overall_strength,
1817            has_modulation: false,
1818            modulation_type: ModulationType::NonSeasonal,
1819            modulation_score: 0.0,
1820            amplitude_trend: 0.0,
1821            wavelet_amplitude: Vec::new(),
1822            time_points: argvals.to_vec(),
1823            scale: 0.0,
1824        };
1825    }
1826
1827    // Step 2: Compute mean curve
1828    let mean_curve = compute_mean_curve(data, n, m);
1829
1830    // Remove DC component
1831    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1832    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1833
1834    // Step 3: Compute wavelet transform at the seasonal period
1835    // For Morlet wavelet: period = scale * 2π / ω₀, so scale = period * ω₀ / (2π)
1836    let omega0 = 6.0; // Standard Morlet parameter
1837    let scale = period * omega0 / (2.0 * PI);
1838
1839    // Use FFT-based CWT for efficiency
1840    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1841
1842    if wavelet_coeffs.is_empty() {
1843        return WaveletAmplitudeResult {
1844            is_seasonal: true,
1845            seasonal_strength: overall_strength,
1846            has_modulation: false,
1847            modulation_type: ModulationType::Stable,
1848            modulation_score: 0.0,
1849            amplitude_trend: 0.0,
1850            wavelet_amplitude: Vec::new(),
1851            time_points: argvals.to_vec(),
1852            scale,
1853        };
1854    }
1855
1856    // Step 4: Extract amplitude (magnitude of wavelet coefficients)
1857    let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();
1858
1859    // Step 5: Compute statistics on amplitude (skip edges)
1860    let (interior_start, interior_end) = match interior_bounds(m) {
1861        Some((s, e)) if e > s + 4 => (s, e),
1862        _ => {
1863            return WaveletAmplitudeResult {
1864                is_seasonal: true,
1865                seasonal_strength: overall_strength,
1866                has_modulation: false,
1867                modulation_type: ModulationType::Stable,
1868                modulation_score: 0.0,
1869                amplitude_trend: 0.0,
1870                wavelet_amplitude,
1871                time_points: argvals.to_vec(),
1872                scale,
1873            };
1874        }
1875    };
1876
1877    let interior_amp = &wavelet_amplitude[interior_start..interior_end];
1878    let interior_times = &argvals[interior_start..interior_end];
1879    let n_interior = interior_amp.len() as f64;
1880
1881    let mean_amp = interior_amp.iter().sum::<f64>() / n_interior;
1882
1883    // Coefficient of variation
1884    let variance = interior_amp
1885        .iter()
1886        .map(|&a| (a - mean_amp).powi(2))
1887        .sum::<f64>()
1888        / n_interior;
1889    let std_amp = variance.sqrt();
1890    let modulation_score = if mean_amp > 1e-10 {
1891        std_amp / mean_amp
1892    } else {
1893        0.0
1894    };
1895
1896    // Step 6: Compute trend
1897    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
1898    let mut cov_ta = 0.0;
1899    let mut var_t = 0.0;
1900    for (&t, &a) in interior_times.iter().zip(interior_amp.iter()) {
1901        cov_ta += (t - t_mean) * (a - mean_amp);
1902        var_t += (t - t_mean).powi(2);
1903    }
1904    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };
1905
1906    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
1907    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
1908        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
1909    } else {
1910        0.0
1911    };
1912
1913    // Step 7: Classify modulation type
1914    let has_modulation = modulation_score > modulation_threshold;
1915    let modulation_type = if !has_modulation {
1916        ModulationType::Stable
1917    } else if amplitude_trend > 0.3 {
1918        ModulationType::Emerging
1919    } else if amplitude_trend < -0.3 {
1920        ModulationType::Fading
1921    } else {
1922        ModulationType::Oscillating
1923    };
1924
1925    WaveletAmplitudeResult {
1926        is_seasonal: true,
1927        seasonal_strength: overall_strength,
1928        has_modulation,
1929        modulation_type,
1930        modulation_score,
1931        amplitude_trend,
1932        wavelet_amplitude,
1933        time_points: argvals.to_vec(),
1934        scale,
1935    }
1936}
1937
1938// ============================================================================
1939// Instantaneous Period
1940// ============================================================================
1941
1942/// Estimate instantaneous period using Hilbert transform.
1943///
1944/// For series with drifting/changing period, this computes the period
1945/// at each time point using the analytic signal.
1946///
1947/// # Arguments
1948/// * `data` - Column-major matrix (n x m)
1949/// * `n` - Number of samples
1950/// * `m` - Number of evaluation points
1951/// * `argvals` - Evaluation points
1952pub fn instantaneous_period(
1953    data: &[f64],
1954    n: usize,
1955    m: usize,
1956    argvals: &[f64],
1957) -> InstantaneousPeriod {
1958    if n == 0 || m < 4 || argvals.len() != m {
1959        return InstantaneousPeriod {
1960            period: Vec::new(),
1961            frequency: Vec::new(),
1962            amplitude: Vec::new(),
1963        };
1964    }
1965
1966    // Compute mean curve
1967    let mean_curve = compute_mean_curve(data, n, m);
1968
1969    // Remove DC component (detrend by subtracting mean)
1970    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1971    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1972
1973    // Compute analytic signal via Hilbert transform
1974    let analytic = hilbert_transform(&detrended);
1975
1976    // Extract instantaneous amplitude and phase
1977    let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
1978
1979    let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();
1980
1981    // Unwrap phase
1982    let unwrapped_phase = unwrap_phase(&phase);
1983
1984    // Compute instantaneous frequency (derivative of phase)
1985    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1986    let mut inst_freq = vec![0.0; m];
1987
1988    // Central differences for interior, forward/backward at boundaries
1989    if m > 1 {
1990        inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
1991    }
1992    for j in 1..(m - 1) {
1993        inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
1994    }
1995    if m > 1 {
1996        inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
1997    }
1998
1999    // Period = 1/frequency (handle near-zero frequencies)
2000    let period: Vec<f64> = inst_freq
2001        .iter()
2002        .map(|&f| {
2003            if f.abs() > 1e-10 {
2004                (1.0 / f).abs()
2005            } else {
2006                f64::INFINITY
2007            }
2008        })
2009        .collect();
2010
2011    InstantaneousPeriod {
2012        period,
2013        frequency: inst_freq,
2014        amplitude,
2015    }
2016}
2017
2018// ============================================================================
2019// Peak Timing Variability Analysis
2020// ============================================================================
2021
2022/// Analyze peak timing variability across cycles.
2023///
2024/// For short series (e.g., 3-5 years of yearly data), this function detects
2025/// one peak per cycle and analyzes how peak timing varies between cycles.
2026///
2027/// # Arguments
2028/// * `data` - Column-major matrix (n x m)
2029/// * `n` - Number of samples
2030/// * `m` - Number of evaluation points
2031/// * `argvals` - Evaluation points
2032/// * `period` - Known period (e.g., 365 for daily data with yearly seasonality)
2033/// * `smooth_nbasis` - Number of Fourier basis functions for smoothing.
2034///   If None, uses GCV for automatic selection.
2035///
2036/// # Returns
2037/// Peak timing result with variability metrics
2038pub fn analyze_peak_timing(
2039    data: &[f64],
2040    n: usize,
2041    m: usize,
2042    argvals: &[f64],
2043    period: f64,
2044    smooth_nbasis: Option<usize>,
2045) -> PeakTimingResult {
2046    if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
2047        return PeakTimingResult {
2048            peak_times: Vec::new(),
2049            peak_values: Vec::new(),
2050            normalized_timing: Vec::new(),
2051            mean_timing: f64::NAN,
2052            std_timing: f64::NAN,
2053            range_timing: f64::NAN,
2054            variability_score: f64::NAN,
2055            timing_trend: f64::NAN,
2056            cycle_indices: Vec::new(),
2057        };
2058    }
2059
2060    // Detect peaks with minimum distance constraint of 0.7 * period
2061    // This ensures we get at most one peak per cycle
2062    let min_distance = period * 0.7;
2063    let peaks = detect_peaks(
2064        data,
2065        n,
2066        m,
2067        argvals,
2068        Some(min_distance),
2069        None, // No prominence filter
2070        true, // Smooth first with Fourier basis
2071        smooth_nbasis,
2072    );
2073
2074    // Use the first sample's peaks (for mean curve analysis)
2075    // If multiple samples, we take the mean curve which is effectively in sample 0
2076    let sample_peaks = if peaks.peaks.is_empty() {
2077        Vec::new()
2078    } else {
2079        peaks.peaks[0].clone()
2080    };
2081
2082    if sample_peaks.is_empty() {
2083        return PeakTimingResult {
2084            peak_times: Vec::new(),
2085            peak_values: Vec::new(),
2086            normalized_timing: Vec::new(),
2087            mean_timing: f64::NAN,
2088            std_timing: f64::NAN,
2089            range_timing: f64::NAN,
2090            variability_score: f64::NAN,
2091            timing_trend: f64::NAN,
2092            cycle_indices: Vec::new(),
2093        };
2094    }
2095
2096    let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
2097    let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();
2098
2099    // Compute normalized timing (position within cycle, 0-1 scale)
2100    let t_start = argvals[0];
2101    let normalized_timing: Vec<f64> = peak_times
2102        .iter()
2103        .map(|&t| {
2104            let cycle_pos = (t - t_start) % period;
2105            cycle_pos / period
2106        })
2107        .collect();
2108
2109    // Compute cycle indices (1-indexed)
2110    let cycle_indices: Vec<usize> = peak_times
2111        .iter()
2112        .map(|&t| ((t - t_start) / period).floor() as usize + 1)
2113        .collect();
2114
2115    // Compute statistics
2116    let n_peaks = normalized_timing.len() as f64;
2117    let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;
2118
2119    let variance: f64 = normalized_timing
2120        .iter()
2121        .map(|&x| (x - mean_timing).powi(2))
2122        .sum::<f64>()
2123        / n_peaks;
2124    let std_timing = variance.sqrt();
2125
2126    let min_timing = normalized_timing
2127        .iter()
2128        .cloned()
2129        .fold(f64::INFINITY, f64::min);
2130    let max_timing = normalized_timing
2131        .iter()
2132        .cloned()
2133        .fold(f64::NEG_INFINITY, f64::max);
2134    let range_timing = max_timing - min_timing;
2135
2136    // Variability score: normalized std deviation
2137    // Max possible std for uniform in [0,1] is ~0.289, so we scale by that
2138    // But since peaks cluster, we use 0.1 as "high" variability threshold
2139    let variability_score = (std_timing / 0.1).min(1.0);
2140
2141    // Timing trend: linear regression of normalized timing on cycle index
2142    let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
2143    let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);
2144
2145    PeakTimingResult {
2146        peak_times,
2147        peak_values,
2148        normalized_timing,
2149        mean_timing,
2150        std_timing,
2151        range_timing,
2152        variability_score,
2153        timing_trend,
2154        cycle_indices,
2155    }
2156}
2157
2158// ============================================================================
2159// Seasonality Classification
2160// ============================================================================
2161
2162/// Classify the type of seasonality in functional data.
2163///
2164/// This is particularly useful for short series (3-5 years) where you need
2165/// to identify:
2166/// - Whether seasonality is present
2167/// - Whether peak timing is stable or variable
2168/// - Which cycles have weak or missing seasonality
2169///
2170/// # Arguments
2171/// * `data` - Column-major matrix (n x m)
2172/// * `n` - Number of samples
2173/// * `m` - Number of evaluation points
2174/// * `argvals` - Evaluation points
2175/// * `period` - Known seasonal period
2176/// * `strength_threshold` - Threshold for seasonal/non-seasonal (default: 0.3)
2177/// * `timing_threshold` - Max std of normalized timing for "stable" (default: 0.05)
2178///
2179/// # Returns
2180/// Seasonality classification with type and diagnostics
2181pub fn classify_seasonality(
2182    data: &[f64],
2183    n: usize,
2184    m: usize,
2185    argvals: &[f64],
2186    period: f64,
2187    strength_threshold: Option<f64>,
2188    timing_threshold: Option<f64>,
2189) -> SeasonalityClassification {
2190    let strength_thresh = strength_threshold.unwrap_or(0.3);
2191    let timing_thresh = timing_threshold.unwrap_or(0.05);
2192
2193    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
2194        return SeasonalityClassification {
2195            is_seasonal: false,
2196            has_stable_timing: false,
2197            timing_variability: f64::NAN,
2198            seasonal_strength: f64::NAN,
2199            cycle_strengths: Vec::new(),
2200            weak_seasons: Vec::new(),
2201            classification: SeasonalType::NonSeasonal,
2202            peak_timing: None,
2203        };
2204    }
2205
2206    // Compute overall seasonal strength
2207    let overall_strength = seasonal_strength_variance(data, n, m, argvals, period, 3);
2208
2209    // Compute per-cycle strength
2210    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
2211    let _points_per_cycle = (period / dt).round() as usize;
2212    let t_start = argvals[0];
2213    let t_end = argvals[m - 1];
2214    let n_cycles = ((t_end - t_start) / period).floor() as usize;
2215
2216    let mut cycle_strengths = Vec::with_capacity(n_cycles);
2217    let mut weak_seasons = Vec::new();
2218
2219    for cycle in 0..n_cycles {
2220        let cycle_start = t_start + cycle as f64 * period;
2221        let cycle_end = cycle_start + period;
2222
2223        // Find indices for this cycle
2224        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
2225        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);
2226
2227        let cycle_m = end_idx - start_idx;
2228        if cycle_m < 4 {
2229            cycle_strengths.push(f64::NAN);
2230            continue;
2231        }
2232
2233        // Extract cycle data
2234        let cycle_data: Vec<f64> = (start_idx..end_idx)
2235            .flat_map(|j| (0..n).map(move |i| data[i + j * n]))
2236            .collect();
2237        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();
2238
2239        let strength =
2240            seasonal_strength_variance(&cycle_data, n, cycle_m, &cycle_argvals, period, 3);
2241
2242        cycle_strengths.push(strength);
2243
2244        if strength < strength_thresh {
2245            weak_seasons.push(cycle);
2246        }
2247    }
2248
2249    // Analyze peak timing
2250    let peak_timing = analyze_peak_timing(data, n, m, argvals, period, None);
2251
2252    // Determine classification
2253    let is_seasonal = overall_strength >= strength_thresh;
2254    let has_stable_timing = peak_timing.std_timing <= timing_thresh;
2255    let timing_variability = peak_timing.variability_score;
2256
2257    // Classify based on patterns
2258    let n_weak = weak_seasons.len();
2259    let classification = if !is_seasonal {
2260        SeasonalType::NonSeasonal
2261    } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
2262        // More than 30% of cycles are weak
2263        SeasonalType::IntermittentSeasonal
2264    } else if !has_stable_timing {
2265        SeasonalType::VariableTiming
2266    } else {
2267        SeasonalType::StableSeasonal
2268    };
2269
2270    SeasonalityClassification {
2271        is_seasonal,
2272        has_stable_timing,
2273        timing_variability,
2274        seasonal_strength: overall_strength,
2275        cycle_strengths,
2276        weak_seasons,
2277        classification,
2278        peak_timing: Some(peak_timing),
2279    }
2280}
2281
2282/// Detect seasonality changes with automatic threshold selection.
2283///
2284/// Uses Otsu's method or percentile-based threshold instead of a fixed value.
2285///
2286/// # Arguments
2287/// * `data` - Column-major matrix (n x m)
2288/// * `n` - Number of samples
2289/// * `m` - Number of evaluation points
2290/// * `argvals` - Evaluation points
2291/// * `period` - Seasonal period
2292/// * `threshold_method` - Method for threshold selection
2293/// * `window_size` - Window size for local strength estimation
2294/// * `min_duration` - Minimum duration to confirm a change
2295pub fn detect_seasonality_changes_auto(
2296    data: &[f64],
2297    n: usize,
2298    m: usize,
2299    argvals: &[f64],
2300    period: f64,
2301    threshold_method: ThresholdMethod,
2302    window_size: f64,
2303    min_duration: f64,
2304) -> ChangeDetectionResult {
2305    if n == 0 || m < 4 || argvals.len() != m {
2306        return ChangeDetectionResult {
2307            change_points: Vec::new(),
2308            strength_curve: Vec::new(),
2309        };
2310    }
2311
2312    // Compute time-varying seasonal strength
2313    let strength_curve = seasonal_strength_windowed(
2314        data,
2315        n,
2316        m,
2317        argvals,
2318        period,
2319        window_size,
2320        StrengthMethod::Variance,
2321    );
2322
2323    if strength_curve.is_empty() {
2324        return ChangeDetectionResult {
2325            change_points: Vec::new(),
2326            strength_curve: Vec::new(),
2327        };
2328    }
2329
2330    // Determine threshold
2331    let threshold = match threshold_method {
2332        ThresholdMethod::Fixed(t) => t,
2333        ThresholdMethod::Percentile(p) => {
2334            let mut sorted: Vec<f64> = strength_curve
2335                .iter()
2336                .copied()
2337                .filter(|x| x.is_finite())
2338                .collect();
2339            sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2340            if sorted.is_empty() {
2341                0.5
2342            } else {
2343                let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2344                sorted[idx.min(sorted.len() - 1)]
2345            }
2346        }
2347        ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2348    };
2349
2350    // Now use the regular detection with computed threshold
2351    detect_seasonality_changes(
2352        data,
2353        n,
2354        m,
2355        argvals,
2356        period,
2357        threshold,
2358        window_size,
2359        min_duration,
2360    )
2361}
2362
/// Result of SAZED ensemble period detection.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Primary detected period (consensus from ensemble); `f64::NAN` when
    /// the input is invalid or too few components were confident
    pub period: f64,
    /// Confidence score (0-1): fraction of the 5 components that agreed
    pub confidence: f64,
    /// Periods detected by each component (may be NaN if not detected)
    pub component_periods: SazedComponents,
    /// Number of components that agreed on the final period
    pub agreeing_components: usize,
}
2375
/// Individual period estimates from each SAZED component.
///
/// Each field is `f64::NAN` when the corresponding component could not
/// produce an estimate.
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Period from spectral (FFT) detection
    pub spectral: f64,
    /// Period from ACF peak detection
    pub acf_peak: f64,
    /// Period from weighted ACF average
    pub acf_average: f64,
    /// Period from ACF zero-crossing analysis
    pub zero_crossing: f64,
    /// Period from spectral differencing
    pub spectral_diff: f64,
}
2390
2391/// SAZED: Spectral-ACF Zero-crossing Ensemble Detection
2392///
2393/// A parameter-free ensemble method for robust period detection.
2394/// Combines 5 detection components:
2395/// 1. Spectral (FFT) - peaks in periodogram
2396/// 2. ACF peak - first significant peak in autocorrelation
2397/// 3. ACF average - weighted mean of ACF peaks
2398/// 4. Zero-crossing - period from ACF zero crossings
2399/// 5. Spectral differencing - FFT on first-differenced signal
2400///
2401/// Each component provides both a period estimate and a confidence score.
2402/// Only components with sufficient confidence participate in voting.
2403/// The final period is chosen by majority voting with tolerance.
2404///
2405/// # Arguments
2406/// * `data` - Input signal (1D time series or mean curve from fdata)
2407/// * `argvals` - Time points corresponding to data
2408/// * `tolerance` - Relative tolerance for considering periods equal (default: 0.05 = 5%)
2409///
2410/// # Returns
2411/// * `SazedResult` containing the consensus period and component details
2412pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
2413    let n = data.len();
2414    let tol = tolerance.unwrap_or(0.05); // Tighter default tolerance
2415
2416    if n < 8 || argvals.len() != n {
2417        return SazedResult {
2418            period: f64::NAN,
2419            confidence: 0.0,
2420            component_periods: SazedComponents {
2421                spectral: f64::NAN,
2422                acf_peak: f64::NAN,
2423                acf_average: f64::NAN,
2424                zero_crossing: f64::NAN,
2425                spectral_diff: f64::NAN,
2426            },
2427            agreeing_components: 0,
2428        };
2429    }
2430
2431    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
2432    let max_lag = (n / 2).max(4);
2433    let signal_range = argvals[n - 1] - argvals[0];
2434
2435    // Minimum detectable period (at least 3 cycles)
2436    let min_period = signal_range / (n as f64 / 3.0);
2437    // Maximum detectable period (at most 2 complete cycles)
2438    let max_period = signal_range / 2.0;
2439
2440    // Component 1: Spectral (FFT) detection with confidence
2441    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);
2442
2443    // Component 2: ACF peak detection with confidence
2444    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);
2445
2446    // Component 3: ACF weighted average (uses ACF peak confidence)
2447    let acf_average_period = sazed_acf_average(data, dt, max_lag);
2448
2449    // Component 4: Zero-crossing analysis with confidence
2450    let (zero_crossing_period, zero_crossing_conf) =
2451        sazed_zero_crossing_with_confidence(data, dt, max_lag);
2452
2453    // Component 5: Spectral on differenced signal with confidence
2454    let (spectral_diff_period, spectral_diff_conf) =
2455        sazed_spectral_diff_with_confidence(data, argvals);
2456
2457    let components = SazedComponents {
2458        spectral: spectral_period,
2459        acf_peak: acf_peak_period,
2460        acf_average: acf_average_period,
2461        zero_crossing: zero_crossing_period,
2462        spectral_diff: spectral_diff_period,
2463    };
2464
2465    // Confidence thresholds for each component (tuned to minimize FPR on noise)
2466    // For Gaussian noise: spectral peaks rarely exceed 6x median, ACF ~1/sqrt(n)
2467    let spectral_thresh = 8.0; // Power ratio must be > 8x median (noise rarely exceeds 6x)
2468    let acf_thresh = 0.3; // ACF correlation must be > 0.3 (noise ~0.1 for n=100)
2469    let zero_crossing_thresh = 0.9; // Zero-crossing consistency > 90%
2470    let spectral_diff_thresh = 6.0; // Diff spectral power ratio > 6x
2471
2472    // Minimum number of confident components required to report a period
2473    let min_confident_components = 2;
2474
2475    // Collect valid periods (only from components with sufficient confidence)
2476    let mut confident_periods: Vec<f64> = Vec::new();
2477
2478    if spectral_period.is_finite()
2479        && spectral_period > min_period
2480        && spectral_period < max_period
2481        && spectral_conf > spectral_thresh
2482    {
2483        confident_periods.push(spectral_period);
2484    }
2485
2486    if acf_peak_period.is_finite()
2487        && acf_peak_period > min_period
2488        && acf_peak_period < max_period
2489        && acf_peak_conf > acf_thresh
2490    {
2491        confident_periods.push(acf_peak_period);
2492    }
2493
2494    // ACF average only counted if ACF peak is confident
2495    if acf_average_period.is_finite()
2496        && acf_average_period > min_period
2497        && acf_average_period < max_period
2498        && acf_peak_conf > acf_thresh
2499    {
2500        confident_periods.push(acf_average_period);
2501    }
2502
2503    if zero_crossing_period.is_finite()
2504        && zero_crossing_period > min_period
2505        && zero_crossing_period < max_period
2506        && zero_crossing_conf > zero_crossing_thresh
2507    {
2508        confident_periods.push(zero_crossing_period);
2509    }
2510
2511    if spectral_diff_period.is_finite()
2512        && spectral_diff_period > min_period
2513        && spectral_diff_period < max_period
2514        && spectral_diff_conf > spectral_diff_thresh
2515    {
2516        confident_periods.push(spectral_diff_period);
2517    }
2518
2519    // Require minimum number of confident components before reporting a period
2520    if confident_periods.len() < min_confident_components {
2521        return SazedResult {
2522            period: f64::NAN,
2523            confidence: 0.0,
2524            component_periods: components,
2525            agreeing_components: confident_periods.len(),
2526        };
2527    }
2528
2529    // Ensemble voting: find the mode with tolerance
2530    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
2531    let confidence = agreeing_count as f64 / 5.0;
2532
2533    SazedResult {
2534        period: consensus_period,
2535        confidence,
2536        component_periods: components,
2537        agreeing_components: agreeing_count,
2538    }
2539}
2540
2541/// SAZED for functional data (matrix format)
2542///
2543/// Computes mean curve first, then applies SAZED.
2544///
2545/// # Arguments
2546/// * `data` - Column-major matrix (n x m)
2547/// * `n` - Number of samples
2548/// * `m` - Number of evaluation points
2549/// * `argvals` - Evaluation points
2550/// * `tolerance` - Relative tolerance for period matching
2551pub fn sazed_fdata(
2552    data: &[f64],
2553    n: usize,
2554    m: usize,
2555    argvals: &[f64],
2556    tolerance: Option<f64>,
2557) -> SazedResult {
2558    if n == 0 || m < 8 || argvals.len() != m {
2559        return SazedResult {
2560            period: f64::NAN,
2561            confidence: 0.0,
2562            component_periods: SazedComponents {
2563                spectral: f64::NAN,
2564                acf_peak: f64::NAN,
2565                acf_average: f64::NAN,
2566                zero_crossing: f64::NAN,
2567                spectral_diff: f64::NAN,
2568            },
2569            agreeing_components: 0,
2570        };
2571    }
2572
2573    let mean_curve = compute_mean_curve(data, n, m);
2574    sazed(&mean_curve, argvals, tolerance)
2575}
2576
2577/// Spectral component with confidence: returns (period, power_ratio)
2578fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2579    let (frequencies, power) = periodogram(data, argvals);
2580
2581    if frequencies.len() < 3 {
2582        return (f64::NAN, 0.0);
2583    }
2584
2585    // Find peaks in power spectrum (skip DC)
2586    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2587
2588    if power_no_dc.is_empty() {
2589        return (f64::NAN, 0.0);
2590    }
2591
2592    // Calculate noise floor as median
2593    let mut sorted_power = power_no_dc.clone();
2594    sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2595    let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2596
2597    // Find global maximum
2598    let mut max_idx = 0;
2599    let mut max_val = 0.0;
2600    for (i, &p) in power_no_dc.iter().enumerate() {
2601        if p > max_val {
2602            max_val = p;
2603            max_idx = i;
2604        }
2605    }
2606
2607    let power_ratio = max_val / noise_floor;
2608    let freq = frequencies[max_idx + 1];
2609
2610    if freq > 1e-15 {
2611        (1.0 / freq, power_ratio)
2612    } else {
2613        (f64::NAN, 0.0)
2614    }
2615}
2616
2617/// ACF peak component with confidence: returns (period, acf_value_at_peak)
2618fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2619    let acf = autocorrelation(data, max_lag);
2620
2621    if acf.len() < 4 {
2622        return (f64::NAN, 0.0);
2623    }
2624
2625    // Find first peak after initial descent
2626    // Skip lag 0 and find where ACF first becomes negative or reaches minimum
2627    let mut min_search_start = 1;
2628    for i in 1..acf.len() {
2629        if acf[i] < 0.0 {
2630            min_search_start = i;
2631            break;
2632        }
2633        if i > 1 && acf[i] > acf[i - 1] {
2634            min_search_start = i - 1;
2635            break;
2636        }
2637    }
2638
2639    // Find first peak after minimum
2640    let peaks = find_peaks_1d(&acf[min_search_start..], 1);
2641
2642    if peaks.is_empty() {
2643        return (f64::NAN, 0.0);
2644    }
2645
2646    let peak_lag = peaks[0] + min_search_start;
2647    let acf_value = acf[peak_lag].max(0.0); // Confidence is the ACF value at the peak
2648
2649    (peak_lag as f64 * dt, acf_value)
2650}
2651
2652/// ACF average component: weighted mean of ACF peak locations
2653fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
2654    let acf = autocorrelation(data, max_lag);
2655
2656    if acf.len() < 4 {
2657        return f64::NAN;
2658    }
2659
2660    // Find all peaks in ACF
2661    let peaks = find_peaks_1d(&acf[1..], 1);
2662
2663    if peaks.is_empty() {
2664        return f64::NAN;
2665    }
2666
2667    // Weight peaks by their ACF value
2668    let mut weighted_sum = 0.0;
2669    let mut weight_sum = 0.0;
2670
2671    for (i, &peak_idx) in peaks.iter().enumerate() {
2672        let lag = peak_idx + 1;
2673        let weight = acf[lag].max(0.0);
2674
2675        if i == 0 {
2676            // First peak is the fundamental period
2677            weighted_sum += lag as f64 * weight;
2678            weight_sum += weight;
2679        } else {
2680            // Later peaks: estimate fundamental by dividing by harmonic number
2681            let expected_fundamental = peaks[0] + 1;
2682            let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
2683            if harmonic > 0 {
2684                let fundamental_est = lag as f64 / harmonic as f64;
2685                weighted_sum += fundamental_est * weight;
2686                weight_sum += weight;
2687            }
2688        }
2689    }
2690
2691    if weight_sum > 1e-15 {
2692        weighted_sum / weight_sum * dt
2693    } else {
2694        f64::NAN
2695    }
2696}
2697
2698/// Zero-crossing component with confidence: returns (period, consistency)
2699/// Consistency is how regular the zero crossings are (std/mean of half-periods)
2700fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2701    let acf = autocorrelation(data, max_lag);
2702
2703    if acf.len() < 4 {
2704        return (f64::NAN, 0.0);
2705    }
2706
2707    // Find zero crossings (sign changes)
2708    let mut crossings = Vec::new();
2709    for i in 1..acf.len() {
2710        if acf[i - 1] * acf[i] < 0.0 {
2711            // Linear interpolation for more precise crossing
2712            let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
2713            crossings.push((i - 1) as f64 + frac);
2714        }
2715    }
2716
2717    if crossings.len() < 2 {
2718        return (f64::NAN, 0.0);
2719    }
2720
2721    // Period is twice the distance between consecutive zero crossings
2722    // (ACF goes through two zero crossings per period)
2723    let mut half_periods = Vec::new();
2724    for i in 1..crossings.len() {
2725        half_periods.push(crossings[i] - crossings[i - 1]);
2726    }
2727
2728    if half_periods.is_empty() {
2729        return (f64::NAN, 0.0);
2730    }
2731
2732    // Calculate consistency: 1 - (std/mean) of half-periods
2733    // High consistency means regular zero crossings
2734    let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
2735    let variance: f64 =
2736        half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
2737    let std = variance.sqrt();
2738    let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);
2739
2740    // Median half-period
2741    half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2742    let median_half = half_periods[half_periods.len() / 2];
2743
2744    (2.0 * median_half * dt, consistency)
2745}
2746
2747/// Spectral differencing with confidence: returns (period, power_ratio)
2748fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2749    if data.len() < 4 {
2750        return (f64::NAN, 0.0);
2751    }
2752
2753    // First difference to remove trend
2754    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2755    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2756
2757    sazed_spectral_with_confidence(&diff, &diff_argvals)
2758}
2759
/// Find peaks in power spectrum above noise floor.
///
/// The noise floor is estimated as the median power; only interior local
/// maxima exceeding twice the median are kept. Returned indices are sorted
/// by descending power.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    if power.len() < 3 {
        return Vec::new();
    }

    // Estimate noise floor as median power. `total_cmp` gives a total,
    // NaN-safe ordering; `sort_unstable` avoids the stable sort's allocation.
    let mut sorted_power = power.to_vec();
    sorted_power.sort_unstable_by(f64::total_cmp);
    let noise_floor = sorted_power[sorted_power.len() / 2];
    let threshold = noise_floor * 2.0; // Peaks must be at least 2x median

    // Collect all interior local maxima above the threshold.
    let mut peaks: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();

    // Sort by power (descending); stable sort preserves index order on ties.
    peaks.sort_by(|a, b| b.1.total_cmp(&a.1));

    peaks.into_iter().map(|(idx, _)| idx).collect()
}
2785
/// Find consensus period from multiple estimates using tolerance-based voting
fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
    if periods.is_empty() {
        return (f64::NAN, 0);
    }
    if periods.len() == 1 {
        return (periods[0], 1);
    }

    // Voting: each candidate counts how many estimates (including itself)
    // lie within the relative tolerance; the best-supported candidate wins.
    let mut best_period = periods[0];
    let mut best_count = 0usize;
    let mut best_sum = 0.0;

    for &candidate in periods {
        let (count, sum) = periods.iter().fold((0usize, 0.0), |(c, s), &other| {
            let rel_diff = (candidate - other).abs() / candidate.max(other);
            if rel_diff <= tolerance {
                (c + 1, s + other)
            } else {
                (c, s)
            }
        });

        // More supporters wins; on a tie, the larger mean period wins.
        let wins = count > best_count
            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64);
        if wins {
            best_count = count;
            best_period = sum / count as f64; // Average of agreeing periods
            best_sum = sum;
        }
    }

    (best_period, best_count)
}
2823
/// Result of Autoperiod detection.
///
/// Produced by [`autoperiod`] / [`autoperiod_fdata`]. On failure (too little
/// data, no spectral peaks, no valid candidates) `period` is NaN and all
/// scores are zero.
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Detected period (NaN when no valid period could be found)
    pub period: f64,
    /// Combined confidence (FFT * ACF validation)
    pub confidence: f64,
    /// FFT power at the detected period
    pub fft_power: f64,
    /// ACF validation score (0-1)
    pub acf_validation: f64,
    /// All candidate periods considered
    pub candidates: Vec<AutoperiodCandidate>,
}
2838
/// A candidate period from Autoperiod detection.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Candidate period (after gradient-ascent refinement on the ACF)
    pub period: f64,
    /// FFT power (raw periodogram value, not normalized)
    pub fft_power: f64,
    /// ACF validation score
    pub acf_score: f64,
    /// Combined score (normalized power * validation)
    pub combined_score: f64,
}
2851
2852/// Autoperiod: Hybrid FFT + ACF Period Detection
2853///
2854/// Implements the Autoperiod algorithm (Vlachos et al. 2005) which:
2855/// 1. Computes the periodogram via FFT to find candidate periods
2856/// 2. Validates each candidate using the autocorrelation function
2857/// 3. Applies gradient ascent to refine the period estimate
2858/// 4. Returns the period with the highest combined confidence
2859///
2860/// This method is more robust than pure FFT because ACF validation
2861/// filters out spurious spectral peaks that don't correspond to
2862/// true periodicity.
2863///
2864/// # Arguments
2865/// * `data` - Input signal (1D time series)
2866/// * `argvals` - Time points corresponding to data
2867/// * `n_candidates` - Maximum number of FFT peaks to consider (default: 5)
2868/// * `gradient_steps` - Number of gradient ascent refinement steps (default: 10)
2869///
2870/// # Returns
2871/// * `AutoperiodResult` containing the best period and validation details
2872pub fn autoperiod(
2873    data: &[f64],
2874    argvals: &[f64],
2875    n_candidates: Option<usize>,
2876    gradient_steps: Option<usize>,
2877) -> AutoperiodResult {
2878    let n = data.len();
2879    let max_candidates = n_candidates.unwrap_or(5);
2880    let steps = gradient_steps.unwrap_or(10);
2881
2882    if n < 8 || argvals.len() != n {
2883        return AutoperiodResult {
2884            period: f64::NAN,
2885            confidence: 0.0,
2886            fft_power: 0.0,
2887            acf_validation: 0.0,
2888            candidates: Vec::new(),
2889        };
2890    }
2891
2892    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
2893    let max_lag = (n / 2).max(4);
2894
2895    // Step 1: Compute periodogram and find candidate periods
2896    let (frequencies, power) = periodogram(data, argvals);
2897
2898    if frequencies.len() < 3 {
2899        return AutoperiodResult {
2900            period: f64::NAN,
2901            confidence: 0.0,
2902            fft_power: 0.0,
2903            acf_validation: 0.0,
2904            candidates: Vec::new(),
2905        };
2906    }
2907
2908    // Find top spectral peaks
2909    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2910    let peak_indices = find_spectral_peaks(&power_no_dc);
2911
2912    if peak_indices.is_empty() {
2913        return AutoperiodResult {
2914            period: f64::NAN,
2915            confidence: 0.0,
2916            fft_power: 0.0,
2917            acf_validation: 0.0,
2918            candidates: Vec::new(),
2919        };
2920    }
2921
2922    // Step 2: Compute ACF for validation
2923    let acf = autocorrelation(data, max_lag);
2924
2925    // Step 3: Validate each candidate and refine with gradient ascent
2926    let mut candidates: Vec<AutoperiodCandidate> = Vec::new();
2927    let total_power: f64 = power_no_dc.iter().sum();
2928
2929    for &peak_idx in peak_indices.iter().take(max_candidates) {
2930        let freq = frequencies[peak_idx + 1];
2931        if freq < 1e-15 {
2932            continue;
2933        }
2934
2935        let initial_period = 1.0 / freq;
2936        let fft_power = power_no_dc[peak_idx];
2937        let normalized_power = fft_power / total_power.max(1e-15);
2938
2939        // Refine period using gradient ascent on ACF
2940        let refined_period = refine_period_gradient(&acf, initial_period, dt, steps);
2941
2942        // Revalidate refined period
2943        let refined_acf_score = validate_period_acf(&acf, refined_period, dt);
2944
2945        let combined_score = normalized_power * refined_acf_score;
2946
2947        candidates.push(AutoperiodCandidate {
2948            period: refined_period,
2949            fft_power,
2950            acf_score: refined_acf_score,
2951            combined_score,
2952        });
2953    }
2954
2955    if candidates.is_empty() {
2956        return AutoperiodResult {
2957            period: f64::NAN,
2958            confidence: 0.0,
2959            fft_power: 0.0,
2960            acf_validation: 0.0,
2961            candidates: Vec::new(),
2962        };
2963    }
2964
2965    // Select best candidate based on combined score
2966    let best = candidates
2967        .iter()
2968        .max_by(|a, b| {
2969            a.combined_score
2970                .partial_cmp(&b.combined_score)
2971                .unwrap_or(std::cmp::Ordering::Equal)
2972        })
2973        .unwrap();
2974
2975    AutoperiodResult {
2976        period: best.period,
2977        confidence: best.combined_score,
2978        fft_power: best.fft_power,
2979        acf_validation: best.acf_score,
2980        candidates,
2981    }
2982}
2983
2984/// Autoperiod for functional data (matrix format)
2985pub fn autoperiod_fdata(
2986    data: &[f64],
2987    n: usize,
2988    m: usize,
2989    argvals: &[f64],
2990    n_candidates: Option<usize>,
2991    gradient_steps: Option<usize>,
2992) -> AutoperiodResult {
2993    if n == 0 || m < 8 || argvals.len() != m {
2994        return AutoperiodResult {
2995            period: f64::NAN,
2996            confidence: 0.0,
2997            fft_power: 0.0,
2998            acf_validation: 0.0,
2999            candidates: Vec::new(),
3000        };
3001    }
3002
3003    let mean_curve = compute_mean_curve(data, n, m);
3004    autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3005}
3006
/// Validate a candidate period using ACF
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    // Nearest integer lag corresponding to the candidate period.
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }

    // Base score: ACF at the candidate lag; negative correlation scores 0.
    let acf_at_lag = acf[lag];
    let mut score = acf_at_lag.max(0.0);

    // A genuine period should NOT correlate strongly at half the lag; if it
    // does, the true period is probably half as long, so penalize.
    let half_lag = lag / 2;
    if half_lag > 0 && half_lag < acf.len() && acf[half_lag] > acf_at_lag * 0.7 {
        score *= 0.5;
    }

    // Correlation repeating at double the lag supports the candidate,
    // so grant a small bonus.
    let double_lag = lag * 2;
    if double_lag < acf.len() && acf[double_lag] > 0.3 {
        score *= 1.2;
    }

    score.min(1.0)
}
3045
3046/// Refine period estimate using gradient ascent on ACF
3047fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3048    let mut period = initial_period;
3049    let step_size = dt * 0.5; // Search step size
3050
3051    for _ in 0..steps {
3052        let current_score = validate_period_acf(acf, period, dt);
3053        let left_score = validate_period_acf(acf, period - step_size, dt);
3054        let right_score = validate_period_acf(acf, period + step_size, dt);
3055
3056        if left_score > current_score && left_score > right_score {
3057            period -= step_size;
3058        } else if right_score > current_score {
3059            period += step_size;
3060        }
3061        // If current is best, we've converged
3062    }
3063
3064    period.max(dt) // Ensure period is at least one time step
3065}
3066
/// Result of CFDAutoperiod detection.
///
/// Produced by [`cfd_autoperiod`] / [`cfd_autoperiod_fdata`]. On failure
/// `period` is NaN and the vectors are empty.
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Detected period (primary, i.e. highest combined score)
    pub period: f64,
    /// Confidence score
    pub confidence: f64,
    /// ACF validation score for the primary period
    pub acf_validation: f64,
    /// All detected periods (cluster centers)
    pub periods: Vec<f64>,
    /// Confidence for each detected period
    pub confidences: Vec<f64>,
}
3081
3082/// CFDAutoperiod: Clustered Filtered Detrended Autoperiod
3083///
3084/// Implements the CFDAutoperiod algorithm (Puech et al. 2020) which:
3085/// 1. Applies first-order differencing to remove trends
3086/// 2. Computes FFT on the detrended signal
3087/// 3. Identifies candidate periods from periodogram peaks
3088/// 4. Clusters nearby candidates using density-based clustering
3089/// 5. Validates cluster centers using ACF on the original signal
3090///
3091/// This method is particularly effective for signals with strong trends
3092/// and handles multiple periodicities by detecting clusters of candidate periods.
3093///
3094/// # Arguments
3095/// * `data` - Input signal (1D time series)
3096/// * `argvals` - Time points corresponding to data
3097/// * `cluster_tolerance` - Relative tolerance for clustering periods (default: 0.1 = 10%)
3098/// * `min_cluster_size` - Minimum number of candidates to form a cluster (default: 1)
3099///
3100/// # Returns
3101/// * `CfdAutoperiodResult` containing detected periods and validation scores
3102pub fn cfd_autoperiod(
3103    data: &[f64],
3104    argvals: &[f64],
3105    cluster_tolerance: Option<f64>,
3106    min_cluster_size: Option<usize>,
3107) -> CfdAutoperiodResult {
3108    let n = data.len();
3109    let tol = cluster_tolerance.unwrap_or(0.1);
3110    let min_size = min_cluster_size.unwrap_or(1);
3111
3112    if n < 8 || argvals.len() != n {
3113        return CfdAutoperiodResult {
3114            period: f64::NAN,
3115            confidence: 0.0,
3116            acf_validation: 0.0,
3117            periods: Vec::new(),
3118            confidences: Vec::new(),
3119        };
3120    }
3121
3122    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3123    let max_lag = (n / 2).max(4);
3124
3125    // Step 1: Apply first-order differencing to detrend
3126    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3127    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3128
3129    // Step 2: Compute periodogram on detrended signal
3130    let (frequencies, power) = periodogram(&diff, &diff_argvals);
3131
3132    if frequencies.len() < 3 {
3133        return CfdAutoperiodResult {
3134            period: f64::NAN,
3135            confidence: 0.0,
3136            acf_validation: 0.0,
3137            periods: Vec::new(),
3138            confidences: Vec::new(),
3139        };
3140    }
3141
3142    // Step 3: Find all candidate periods from spectral peaks
3143    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3144    let peak_indices = find_spectral_peaks(&power_no_dc);
3145
3146    if peak_indices.is_empty() {
3147        return CfdAutoperiodResult {
3148            period: f64::NAN,
3149            confidence: 0.0,
3150            acf_validation: 0.0,
3151            periods: Vec::new(),
3152            confidences: Vec::new(),
3153        };
3154    }
3155
3156    // Convert peak indices to periods with their powers
3157    let total_power: f64 = power_no_dc.iter().sum();
3158    let mut candidates: Vec<(f64, f64)> = Vec::new(); // (period, normalized_power)
3159
3160    for &peak_idx in &peak_indices {
3161        let freq = frequencies[peak_idx + 1];
3162        if freq > 1e-15 {
3163            let period = 1.0 / freq;
3164            let normalized_power = power_no_dc[peak_idx] / total_power.max(1e-15);
3165            candidates.push((period, normalized_power));
3166        }
3167    }
3168
3169    if candidates.is_empty() {
3170        return CfdAutoperiodResult {
3171            period: f64::NAN,
3172            confidence: 0.0,
3173            acf_validation: 0.0,
3174            periods: Vec::new(),
3175            confidences: Vec::new(),
3176        };
3177    }
3178
3179    // Step 4: Cluster candidates using density-based approach
3180    let clusters = cluster_periods(&candidates, tol, min_size);
3181
3182    if clusters.is_empty() {
3183        return CfdAutoperiodResult {
3184            period: f64::NAN,
3185            confidence: 0.0,
3186            acf_validation: 0.0,
3187            periods: Vec::new(),
3188            confidences: Vec::new(),
3189        };
3190    }
3191
3192    // Step 5: Validate cluster centers using ACF on original signal
3193    let acf = autocorrelation(data, max_lag);
3194
3195    let mut validated: Vec<(f64, f64, f64)> = Vec::new(); // (period, acf_score, power)
3196
3197    for (center, power_sum) in clusters {
3198        let acf_score = validate_period_acf(&acf, center, dt);
3199        if acf_score > 0.1 {
3200            // Only keep periods with reasonable ACF validation
3201            validated.push((center, acf_score, power_sum));
3202        }
3203    }
3204
3205    if validated.is_empty() {
3206        // Fall back to best cluster without ACF validation
3207        let (center, power_sum) = cluster_periods(&candidates, tol, min_size)
3208            .into_iter()
3209            .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3210            .unwrap();
3211        return CfdAutoperiodResult {
3212            period: center,
3213            confidence: power_sum,
3214            acf_validation: 0.0,
3215            periods: vec![center],
3216            confidences: vec![power_sum],
3217        };
3218    }
3219
3220    // Sort by combined score (ACF * power)
3221    validated.sort_by(|a, b| {
3222        let score_a = a.1 * a.2;
3223        let score_b = b.1 * b.2;
3224        score_b
3225            .partial_cmp(&score_a)
3226            .unwrap_or(std::cmp::Ordering::Equal)
3227    });
3228
3229    let periods: Vec<f64> = validated.iter().map(|v| v.0).collect();
3230    let confidences: Vec<f64> = validated.iter().map(|v| v.1 * v.2).collect();
3231
3232    CfdAutoperiodResult {
3233        period: validated[0].0,
3234        confidence: validated[0].1 * validated[0].2,
3235        acf_validation: validated[0].1,
3236        periods,
3237        confidences,
3238    }
3239}
3240
3241/// CFDAutoperiod for functional data (matrix format)
3242pub fn cfd_autoperiod_fdata(
3243    data: &[f64],
3244    n: usize,
3245    m: usize,
3246    argvals: &[f64],
3247    cluster_tolerance: Option<f64>,
3248    min_cluster_size: Option<usize>,
3249) -> CfdAutoperiodResult {
3250    if n == 0 || m < 8 || argvals.len() != m {
3251        return CfdAutoperiodResult {
3252            period: f64::NAN,
3253            confidence: 0.0,
3254            acf_validation: 0.0,
3255            periods: Vec::new(),
3256            confidences: Vec::new(),
3257        };
3258    }
3259
3260    let mean_curve = compute_mean_curve(data, n, m);
3261    cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3262}
3263
/// Cluster periods using a simple density-based approach.
///
/// Candidates are sorted by period and grouped greedily: a candidate joins
/// the current cluster when its relative distance to the running cluster
/// mean is within `tolerance`. Each emitted cluster is summarized as its
/// power-weighted center plus total power; clusters smaller than
/// `min_size` are dropped.
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    if candidates.is_empty() {
        return Vec::new();
    }

    // Power-weighted center and total power of one finished cluster
    // (previously duplicated for the trailing cluster).
    fn summarize(cluster: &[(f64, f64)]) -> (f64, f64) {
        let total_power: f64 = cluster.iter().map(|(_, pw)| pw).sum();
        let center = cluster.iter().map(|(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
        (center, total_power)
    }

    // Sort candidates by period so clustering is a single left-to-right pass.
    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new(); // (center, total_power)
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in sorted.iter().skip(1) {
        // Unweighted mean period of the growing cluster.
        let cluster_center = current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - cluster_center).abs() / cluster_center.max(period);

        if rel_diff <= tolerance {
            // Add to current cluster
            current.push((period, power));
        } else {
            // Finish current cluster and start a new one
            if current.len() >= min_size {
                clusters.push(summarize(&current));
            }
            current = vec![(period, power)];
        }
    }

    // Don't forget the trailing cluster
    if current.len() >= min_size {
        clusters.push(summarize(&current));
    }

    clusters
}
3316
3317// ============================================================================
3318// Lomb-Scargle Periodogram
3319// ============================================================================
3320
/// Result of Lomb-Scargle periodogram analysis.
///
/// Produced by [`lomb_scargle`] / [`lomb_scargle_fdata`]. `frequencies`,
/// `periods`, and `power` are parallel vectors of equal length.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Test frequencies
    pub frequencies: Vec<f64>,
    /// Corresponding periods (1/frequency; infinity for zero frequency)
    pub periods: Vec<f64>,
    /// Normalized Lomb-Scargle power at each frequency
    pub power: Vec<f64>,
    /// Peak period (highest power)
    pub peak_period: f64,
    /// Peak frequency
    pub peak_frequency: f64,
    /// Peak power
    pub peak_power: f64,
    /// False alarm probability at peak (significance level)
    pub false_alarm_probability: f64,
    /// Significance level (1 - FAP)
    pub significance: f64,
}
3341
3342/// Compute Lomb-Scargle periodogram for irregularly sampled data.
3343///
3344/// The Lomb-Scargle periodogram is designed for unevenly-spaced time series
3345/// and reduces to the standard periodogram for evenly-spaced data.
3346///
3347/// # Algorithm
3348/// Following Scargle (1982) and Horne & Baliunas (1986):
3349/// 1. For each test frequency ω, compute the phase shift τ
3350/// 2. Compute the normalized power P(ω)
3351/// 3. Estimate false alarm probability using the exponential distribution
3352///
3353/// # Arguments
3354/// * `times` - Observation times (not necessarily evenly spaced)
3355/// * `values` - Observed values at each time
3356/// * `frequencies` - Optional frequencies to evaluate (cycles per unit time).
3357///   If None, automatically generates a frequency grid.
3358/// * `oversampling` - Oversampling factor for auto-generated frequency grid.
3359///   Default: 4.0. Higher values give finer frequency resolution.
3360/// * `nyquist_factor` - Maximum frequency as multiple of pseudo-Nyquist.
3361///   Default: 1.0.
3362///
3363/// # Returns
3364/// `LombScargleResult` with power spectrum and significance estimates.
3365///
3366/// # Example
3367/// ```rust
3368/// use fdars_core::seasonal::lomb_scargle;
3369/// use std::f64::consts::PI;
3370///
3371/// // Irregularly sampled sine wave
3372/// let times: Vec<f64> = vec![0.0, 0.3, 0.7, 1.2, 1.5, 2.1, 2.8, 3.0, 3.5, 4.0];
3373/// let period = 1.5;
3374/// let values: Vec<f64> = times.iter()
3375///     .map(|&t| (2.0 * PI * t / period).sin())
3376///     .collect();
3377///
3378/// let result = lomb_scargle(&times, &values, None, None, None);
3379/// assert!((result.peak_period - period).abs() < 0.2);
3380/// ```
3381pub fn lomb_scargle(
3382    times: &[f64],
3383    values: &[f64],
3384    frequencies: Option<&[f64]>,
3385    oversampling: Option<f64>,
3386    nyquist_factor: Option<f64>,
3387) -> LombScargleResult {
3388    let n = times.len();
3389    assert_eq!(n, values.len(), "times and values must have same length");
3390    assert!(n >= 3, "Need at least 3 data points");
3391
3392    // Compute mean and variance
3393    let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3394    let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3395
3396    // Generate frequency grid if not provided
3397    let freq_vec: Vec<f64>;
3398    let freqs = if let Some(f) = frequencies {
3399        f
3400    } else {
3401        freq_vec = generate_ls_frequencies(
3402            times,
3403            oversampling.unwrap_or(4.0),
3404            nyquist_factor.unwrap_or(1.0),
3405        );
3406        &freq_vec
3407    };
3408
3409    // Compute Lomb-Scargle power at each frequency
3410    let mut power = Vec::with_capacity(freqs.len());
3411
3412    for &freq in freqs.iter() {
3413        let omega = 2.0 * PI * freq;
3414        let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3415        power.push(p);
3416    }
3417
3418    // Find peak
3419    let (peak_idx, &peak_power) = power
3420        .iter()
3421        .enumerate()
3422        .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
3423        .unwrap_or((0, &0.0));
3424
3425    let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3426    let peak_period = if peak_frequency > 0.0 {
3427        1.0 / peak_frequency
3428    } else {
3429        f64::INFINITY
3430    };
3431
3432    // Compute false alarm probability
3433    let n_indep = estimate_independent_frequencies(times, freqs.len());
3434    let fap = lomb_scargle_fap(peak_power, n_indep, n);
3435
3436    // Compute periods from frequencies
3437    let periods: Vec<f64> = freqs
3438        .iter()
3439        .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3440        .collect();
3441
3442    LombScargleResult {
3443        frequencies: freqs.to_vec(),
3444        periods,
3445        power,
3446        peak_period,
3447        peak_frequency,
3448        peak_power,
3449        false_alarm_probability: fap,
3450        significance: 1.0 - fap,
3451    }
3452}
3453
/// Compute Lomb-Scargle power at a single frequency.
///
/// Uses the Scargle (1982) normalization.
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    // Degenerate inputs carry no spectral information.
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    let n = times.len();

    // Phase shift tau chosen so the sine and cosine bases are orthogonal
    // over the (possibly irregular) sampling times.
    let mut sin_sum = 0.0;
    let mut cos_sum = 0.0;
    for &t in times {
        let arg = 2.0 * omega * t;
        sin_sum += arg.sin();
        cos_sum += arg.cos();
    }
    let tau = sin_sum.atan2(cos_sum) / (2.0 * omega);

    // Project the centered data onto the tau-shifted sine/cosine bases.
    let mut sc = 0.0; // sum of y * cos
    let mut ss = 0.0; // sum of y * sin
    let mut css = 0.0; // sum of cos^2
    let mut sss = 0.0; // sum of sin^2

    for i in 0..n {
        let y_centered = values[i] - mean_y;
        let arg = omega * (times[i] - tau);
        let c = arg.cos();
        let s = arg.sin();

        sc += y_centered * c;
        ss += y_centered * s;
        css += c * c;
        sss += s * s;
    }

    // Guard the denominators against exact zeros, then apply the
    // Scargle (1982) normalization.
    let css = css.max(1e-15);
    let sss = sss.max(1e-15);
    0.5 * (sc * sc / css + ss * ss / sss) / var_y
}
3505
/// Generate frequency grid for Lomb-Scargle.
///
/// The grid spans from 1/T_total to f_nyquist with oversampling.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    let n = times.len();
    if n < 2 {
        return vec![0.0];
    }

    // Observation span, guarded against a zero-length record.
    let t_min = times.iter().cloned().fold(f64::INFINITY, f64::min);
    let t_max = times.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
    let t_span = (t_max - t_min).max(1e-10);

    // Lowest resolvable frequency: one full cycle over the record.
    let f_min = 1.0 / t_span;

    // Pseudo-Nyquist from the mean sampling rate (data may be irregular),
    // scaled by the caller's factor to give the upper frequency bound.
    let f_nyquist = 0.5 * (n - 1) as f64 / t_span;
    let f_max = f_nyquist * nyquist_factor;

    // Oversampled frequency step.
    let df = f_min / oversampling;

    // Grid size, capped to bound memory usage.
    let n_freq = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);

    (0..n_freq).map(|i| f_min + i as f64 * df).collect()
}
3539
/// Estimate number of independent frequencies (for FAP calculation).
///
/// For irregularly sampled data, this is approximately the number of
/// data points (Horne & Baliunas 1986).
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    // Conservative choice: the smaller of data-point count and grid size.
    usize::min(times.len(), n_freq)
}
3549
/// Compute false alarm probability for Lomb-Scargle peak.
///
/// Uses the exponential distribution approximation:
/// FAP ≈ 1 - (1 - exp(-z))^M
/// where z is the power and M is the number of independent frequencies.
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // P(a single frequency has power below z) under the null hypothesis.
    let prob_single = 1.0 - (-power).exp();

    // Degenerate tails of the single-frequency CDF.
    if prob_single >= 1.0 {
        return 0.0; // Very significant
    }
    if prob_single <= 0.0 {
        return 1.0; // Not significant
    }

    // FAP = 1 - prob_single^M, computed in log space for stability.
    let log_cdf = n_indep as f64 * prob_single.ln();
    if log_cdf < -700.0 {
        // exp() would underflow: the peak is effectively certain to be real.
        0.0
    } else {
        1.0 - log_cdf.exp()
    }
}
3585
3586/// Compute Lomb-Scargle periodogram for functional data (multiple curves).
3587///
3588/// Computes the periodogram for each curve and returns the result for the
3589/// mean curve or ensemble statistics.
3590///
3591/// # Arguments
3592/// * `data` - Column-major matrix (n x m) of functional data
3593/// * `n` - Number of samples (rows)
3594/// * `m` - Number of evaluation points (columns)
3595/// * `argvals` - Time points of length m
3596/// * `oversampling` - Oversampling factor. Default: 4.0
3597/// * `nyquist_factor` - Maximum frequency multiplier. Default: 1.0
3598///
3599/// # Returns
3600/// `LombScargleResult` computed from the mean curve.
3601pub fn lomb_scargle_fdata(
3602    data: &[f64],
3603    n: usize,
3604    m: usize,
3605    argvals: &[f64],
3606    oversampling: Option<f64>,
3607    nyquist_factor: Option<f64>,
3608) -> LombScargleResult {
3609    // Compute mean curve
3610    let mean_curve = compute_mean_curve(data, n, m);
3611
3612    // Run Lomb-Scargle on mean curve
3613    lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
3614}
3615
3616// ============================================================================
3617// Matrix Profile (STOMP Algorithm)
3618// ============================================================================
3619
/// Result of Matrix Profile computation.
///
/// Produced by `matrix_profile`; distances are z-normalized Euclidean
/// distances between subsequences of length `subsequence_length`.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// The matrix profile (minimum z-normalized distance for each position)
    pub profile: Vec<f64>,
    /// Index of the nearest neighbor for each position
    pub profile_index: Vec<usize>,
    /// Subsequence length used
    pub subsequence_length: usize,
    /// Detected periods from arc analysis (strongest first, at most 5)
    pub detected_periods: Vec<f64>,
    /// Arc counts at each index distance (for period detection)
    pub arc_counts: Vec<usize>,
    /// Most prominent detected period (0.0 when none was found)
    pub primary_period: f64,
    /// Confidence score for primary period (based on arc prominence, in [0, 1])
    pub confidence: f64,
}
3638
3639/// Compute Matrix Profile using STOMP algorithm (Scalable Time series Ordered-search Matrix Profile).
3640///
3641/// The Matrix Profile is a data structure that stores the z-normalized Euclidean distance
3642/// between each subsequence of a time series and its nearest neighbor. It enables efficient
3643/// motif discovery and anomaly detection.
3644///
3645/// # Algorithm (STOMP - Zhu et al. 2016)
3646/// 1. Pre-compute sliding mean and standard deviation using cumulative sums
3647/// 2. Use FFT to compute first row of distance matrix
3648/// 3. Update subsequent rows incrementally using the dot product update rule
3649/// 4. Track minimum distance and index at each position
3650///
3651/// # Arguments
3652/// * `values` - Time series values
3653/// * `subsequence_length` - Length of subsequences to compare (window size)
3654/// * `exclusion_zone` - Fraction of subsequence length to exclude around each position
3655///   to prevent trivial self-matches. Default: 0.5
3656///
3657/// # Returns
3658/// `MatrixProfileResult` with profile, indices, and detected periods.
3659///
3660/// # Example
3661/// ```rust
3662/// use fdars_core::seasonal::matrix_profile;
3663/// use std::f64::consts::PI;
3664///
3665/// // Periodic signal
3666/// let period = 20.0;
3667/// let values: Vec<f64> = (0..100)
3668///     .map(|i| (2.0 * PI * i as f64 / period).sin())
3669///     .collect();
3670///
3671/// let result = matrix_profile(&values, Some(15), None);
3672/// assert!((result.primary_period - period).abs() < 5.0);
3673/// ```
3674pub fn matrix_profile(
3675    values: &[f64],
3676    subsequence_length: Option<usize>,
3677    exclusion_zone: Option<f64>,
3678) -> MatrixProfileResult {
3679    let n = values.len();
3680
3681    // Default subsequence length: ~ 1/4 of series length, capped at reasonable range
3682    let m = subsequence_length.unwrap_or_else(|| {
3683        let default_m = n / 4;
3684        default_m.max(4).min(n / 2)
3685    });
3686
3687    assert!(m >= 3, "Subsequence length must be at least 3");
3688    assert!(
3689        m <= n / 2,
3690        "Subsequence length must be at most half the series length"
3691    );
3692
3693    let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3694    let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3695
3696    // Number of subsequences
3697    let profile_len = n - m + 1;
3698
3699    // Compute sliding statistics
3700    let (means, stds) = compute_sliding_stats(values, m);
3701
3702    // Compute the matrix profile using STOMP
3703    let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3704
3705    // Perform arc analysis to detect periods
3706    let (arc_counts, detected_periods, primary_period, confidence) =
3707        analyze_arcs(&profile_index, profile_len, m);
3708
3709    MatrixProfileResult {
3710        profile,
3711        profile_index,
3712        subsequence_length: m,
3713        detected_periods,
3714        arc_counts,
3715        primary_period,
3716        confidence,
3717    }
3718}
3719
/// Sliding mean and standard deviation of every length-`m` window.
///
/// Uses prefix sums of the values and of their squares, so the whole pass
/// is O(n). Standard deviations are floored at 1e-10 so callers can
/// divide by them safely.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let n_windows = n - m + 1;
    let len = m as f64;

    // prefix[i] = sum of values[..i]; prefix_sq likewise for squares.
    let mut prefix = vec![0.0; n + 1];
    let mut prefix_sq = vec![0.0; n + 1];
    for (i, &v) in values.iter().enumerate() {
        prefix[i + 1] = prefix[i] + v;
        prefix_sq[i + 1] = prefix_sq[i] + v * v;
    }

    let mut means = Vec::with_capacity(n_windows);
    let mut stds = Vec::with_capacity(n_windows);
    for start in 0..n_windows {
        let mean = (prefix[start + m] - prefix[start]) / len;
        let mean_sq = (prefix_sq[start + m] - prefix_sq[start]) / len;
        // E[x^2] - E[x]^2 can dip slightly below zero from rounding; clamp.
        let variance = (mean_sq - mean * mean).max(0.0);
        means.push(mean);
        // Floor guards later divisions against constant windows.
        stds.push(variance.sqrt().max(1e-10));
    }

    (means, stds)
}
3756
3757/// Core STOMP algorithm implementation.
3758///
3759/// Uses FFT for the first row and incremental updates for subsequent rows.
3760fn stomp_core(
3761    values: &[f64],
3762    m: usize,
3763    means: &[f64],
3764    stds: &[f64],
3765    exclusion_radius: usize,
3766) -> (Vec<f64>, Vec<usize>) {
3767    let n = values.len();
3768    let profile_len = n - m + 1;
3769
3770    // Initialize profile with infinity and index with 0
3771    let mut profile = vec![f64::INFINITY; profile_len];
3772    let mut profile_index = vec![0usize; profile_len];
3773
3774    // Compute first row using direct computation (could use FFT for large n)
3775    // QT[0,j] = sum(T[0:m] * T[j:j+m]) for each j
3776    let mut qt = vec![0.0; profile_len];
3777
3778    // First query subsequence
3779    for j in 0..profile_len {
3780        let mut dot = 0.0;
3781        for k in 0..m {
3782            dot += values[k] * values[j + k];
3783        }
3784        qt[j] = dot;
3785    }
3786
3787    // Process first row
3788    update_profile_row(
3789        0,
3790        &qt,
3791        means,
3792        stds,
3793        m,
3794        exclusion_radius,
3795        &mut profile,
3796        &mut profile_index,
3797    );
3798
3799    // Process subsequent rows using incremental updates
3800    for i in 1..profile_len {
3801        // Update QT using the sliding dot product update
3802        // QT[i,j] = QT[i-1,j-1] - T[i-1]*T[j-1] + T[i+m-1]*T[j+m-1]
3803        let mut qt_new = vec![0.0; profile_len];
3804
3805        // First element needs direct computation
3806        let mut dot = 0.0;
3807        for k in 0..m {
3808            dot += values[i + k] * values[k];
3809        }
3810        qt_new[0] = dot;
3811
3812        // Update rest using incremental formula
3813        for j in 1..profile_len {
3814            qt_new[j] =
3815                qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3816        }
3817
3818        qt = qt_new;
3819
3820        // Update profile with this row
3821        update_profile_row(
3822            i,
3823            &qt,
3824            means,
3825            stds,
3826            m,
3827            exclusion_radius,
3828            &mut profile,
3829            &mut profile_index,
3830        );
3831    }
3832
3833    (profile, profile_index)
3834}
3835
/// Fold the distances implied by dot-product row `i` into the profile.
///
/// For every column `j` outside the exclusion zone, converts the sliding
/// dot product `qt[j]` into the z-normalized Euclidean distance between
/// subsequences `i` and `j`, then lowers both `profile[i]` and
/// `profile[j]` — the distance matrix is symmetric, so each row updates
/// both endpoints.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let len = m as f64;

    for j in 0..profile.len() {
        // Positions near the diagonal are trivial self-matches; skip them.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        // Pearson correlation of the two z-normalized subsequences:
        // r = (QT - m*mu_i*mu_j) / (m*sigma_i*sigma_j), clamped to [-1, 1].
        let denom = len * stds[i] * stds[j];
        let r = if denom > 0.0 {
            ((qt[j] - len * means[i] * means[j]) / denom).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        // d = sqrt(2m(1 - r)); max(0) guards tiny negative rounding.
        let dist = (2.0 * len * (1.0 - r)).max(0.0).sqrt();

        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
3883
/// Detect candidate periods by counting nearest-neighbor "arcs".
///
/// Each position's arc length |i - profile_index[i]| is histogrammed;
/// local maxima of that histogram at plausible lags (>= m/2, count >= 3)
/// are candidate periods. Returns `(arc_counts, top-5 periods, primary
/// period, confidence)`, where confidence is the strongest peak's share
/// of all arcs at plausible lags, capped at 1.
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    // Histogram of nearest-neighbor arc lengths.
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    // Local maxima with a minimum count of 3, ignoring lags below m/2.
    let min_period = m / 2;
    let mut candidates: Vec<(usize, usize)> = Vec::new();
    for lag in min_period..arc_counts.len().saturating_sub(1) {
        let here = arc_counts[lag];
        let left = arc_counts[lag.saturating_sub(1)];
        let right = arc_counts[(lag + 1).min(arc_counts.len() - 1)];
        if here > left && here > right && here >= 3 {
            candidates.push((lag, here));
        }
    }

    // Strongest peaks first.
    candidates.sort_by(|a, b| b.1.cmp(&a.1));

    let detected_periods: Vec<f64> = candidates
        .iter()
        .take(5)
        .map(|&(lag, _)| lag as f64)
        .collect();

    // Primary period and its prominence-based confidence.
    let (primary_period, confidence) = match candidates.first() {
        Some(&(lag, count)) => {
            let total: usize = arc_counts[min_period..].iter().sum();
            let conf = if total > 0 {
                (count as f64 / total as f64).min(1.0)
            } else {
                0.0
            };
            (lag as f64, conf)
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
3941
3942/// Compute Matrix Profile for functional data (multiple curves).
3943///
3944/// Computes the matrix profile for each curve and returns aggregated results.
3945///
3946/// # Arguments
3947/// * `data` - Column-major matrix (n x m) of functional data
3948/// * `n` - Number of samples (rows)
3949/// * `m` - Number of evaluation points (columns)
3950/// * `subsequence_length` - Length of subsequences. If None, automatically determined.
3951/// * `exclusion_zone` - Exclusion zone fraction. Default: 0.5
3952///
3953/// # Returns
3954/// `MatrixProfileResult` computed from the mean curve.
3955pub fn matrix_profile_fdata(
3956    data: &[f64],
3957    n: usize,
3958    m: usize,
3959    subsequence_length: Option<usize>,
3960    exclusion_zone: Option<f64>,
3961) -> MatrixProfileResult {
3962    // Compute mean curve
3963    let mean_curve = compute_mean_curve(data, n, m);
3964
3965    // Run matrix profile on mean curve
3966    matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
3967}
3968
3969/// Detect seasonality using Matrix Profile analysis.
3970///
3971/// Returns true if significant periodicity is detected based on matrix profile analysis.
3972///
3973/// # Arguments
3974/// * `values` - Time series values
3975/// * `subsequence_length` - Length of subsequences to compare
3976/// * `confidence_threshold` - Minimum confidence for positive detection. Default: 0.1
3977///
3978/// # Returns
3979/// Tuple of (is_seasonal, detected_period, confidence)
3980pub fn matrix_profile_seasonality(
3981    values: &[f64],
3982    subsequence_length: Option<usize>,
3983    confidence_threshold: Option<f64>,
3984) -> (bool, f64, f64) {
3985    let result = matrix_profile(values, subsequence_length, None);
3986
3987    let threshold = confidence_threshold.unwrap_or(0.1);
3988    let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
3989
3990    (is_seasonal, result.primary_period, result.confidence)
3991}
3992
3993// ============================================================================
3994// Singular Spectrum Analysis (SSA)
3995// ============================================================================
3996
/// Result of Singular Spectrum Analysis.
///
/// Produced by `ssa`; by construction `trend + seasonal + noise`
/// reproduces the input series exactly.
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component
    pub trend: Vec<f64>,
    /// Reconstructed seasonal/periodic component
    pub seasonal: Vec<f64>,
    /// Noise/residual component (input minus trend minus seasonal)
    pub noise: Vec<f64>,
    /// Singular values from SVD (sorted descending)
    pub singular_values: Vec<f64>,
    /// Contribution of each component (proportion of variance)
    pub contributions: Vec<f64>,
    /// Window length used for embedding
    pub window_length: usize,
    /// Number of components extracted
    pub n_components: usize,
    /// Detected period (if any significant periodicity found; 0.0 otherwise)
    pub detected_period: f64,
    /// Confidence score for detected period
    pub confidence: f64,
}
4019
4020/// Singular Spectrum Analysis (SSA) for time series decomposition.
4021///
4022/// SSA is a model-free, non-parametric method for decomposing a time series
4023/// into trend, oscillatory (seasonal), and noise components using singular
4024/// value decomposition of the trajectory matrix.
4025///
4026/// # Algorithm
4027/// 1. **Embedding**: Convert series into trajectory matrix using sliding windows
4028/// 2. **Decomposition**: SVD of trajectory matrix
4029/// 3. **Grouping**: Identify trend vs. periodic vs. noise components
4030/// 4. **Reconstruction**: Diagonal averaging to recover time series
4031///
4032/// # Arguments
4033/// * `values` - Time series values
4034/// * `window_length` - Embedding window length (L). If None, uses L = min(n/2, 50).
4035///   Larger values capture longer-term patterns but need longer series.
4036/// * `n_components` - Number of components to extract. If None, uses 10.
4037/// * `trend_components` - Indices of components for trend (0-based). If None, auto-detect.
4038/// * `seasonal_components` - Indices of components for seasonal. If None, auto-detect.
4039///
4040/// # Returns
4041/// `SsaResult` with decomposed components and diagnostics.
4042///
4043/// # Example
4044/// ```rust
4045/// use fdars_core::seasonal::ssa;
4046/// use std::f64::consts::PI;
4047///
4048/// // Signal with trend + seasonal + noise
4049/// let n = 100;
4050/// let values: Vec<f64> = (0..n)
4051///     .map(|i| {
4052///         let t = i as f64;
4053///         0.01 * t + (2.0 * PI * t / 12.0).sin() + 0.1 * (i as f64 * 0.1).sin()
4054///     })
4055///     .collect();
4056///
4057/// let result = ssa(&values, None, None, None, None);
4058/// assert!(result.detected_period > 0.0);
4059/// ```
4060pub fn ssa(
4061    values: &[f64],
4062    window_length: Option<usize>,
4063    n_components: Option<usize>,
4064    trend_components: Option<&[usize]>,
4065    seasonal_components: Option<&[usize]>,
4066) -> SsaResult {
4067    let n = values.len();
4068
4069    // Default window length: min(n/2, 50)
4070    let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));
4071
4072    if n < 4 || l < 2 || l > n / 2 {
4073        return SsaResult {
4074            trend: values.to_vec(),
4075            seasonal: vec![0.0; n],
4076            noise: vec![0.0; n],
4077            singular_values: vec![],
4078            contributions: vec![],
4079            window_length: l,
4080            n_components: 0,
4081            detected_period: 0.0,
4082            confidence: 0.0,
4083        };
4084    }
4085
4086    // Number of columns in trajectory matrix
4087    let k = n - l + 1;
4088
4089    // Step 1: Embedding - create trajectory matrix (L x K)
4090    let trajectory = embed_trajectory(values, l, k);
4091
4092    // Step 2: SVD decomposition
4093    let (u, sigma, vt) = svd_decompose(&trajectory, l, k);
4094
4095    // Determine number of components to use
4096    let max_components = sigma.len();
4097    let n_comp = n_components.unwrap_or(10).min(max_components);
4098
4099    // Compute contributions (proportion of total variance)
4100    let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
4101    let contributions: Vec<f64> = sigma
4102        .iter()
4103        .take(n_comp)
4104        .map(|&s| s * s / total_var.max(1e-15))
4105        .collect();
4106
4107    // Step 3: Grouping - identify trend and seasonal components
4108    let (trend_idx, seasonal_idx, detected_period, confidence) =
4109        if trend_components.is_some() || seasonal_components.is_some() {
4110            // Use provided groupings
4111            let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
4112            let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
4113            (t_idx, s_idx, 0.0, 0.0)
4114        } else {
4115            // Auto-detect groupings
4116            auto_group_ssa_components(&u, &sigma, l, k, n_comp)
4117        };
4118
4119    // Step 4: Reconstruction via diagonal averaging
4120    let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
4121    let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);
4122
4123    // Noise is the remainder
4124    let noise: Vec<f64> = values
4125        .iter()
4126        .zip(trend.iter())
4127        .zip(seasonal.iter())
4128        .map(|((&y, &t), &s)| y - t - s)
4129        .collect();
4130
4131    SsaResult {
4132        trend,
4133        seasonal,
4134        noise,
4135        singular_values: sigma.into_iter().take(n_comp).collect(),
4136        contributions,
4137        window_length: l,
4138        n_components: n_comp,
4139        detected_period,
4140        confidence,
4141    }
4142}
4143
/// Build the L x K trajectory (Hankel) matrix, stored column-major.
///
/// Column j is the window `values[j..j + l]`, so the column-major
/// flattening is simply the windows concatenated in order.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    (0..k)
        .flat_map(|j| values[j..j + l].iter().copied())
        .collect()
}
4157
4158/// SVD decomposition of trajectory matrix using nalgebra.
4159fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
4160    use nalgebra::{DMatrix, SVD};
4161
4162    // Create nalgebra matrix (column-major)
4163    let mat = DMatrix::from_column_slice(l, k, trajectory);
4164
4165    // Compute SVD
4166    let svd = SVD::new(mat, true, true);
4167
4168    // Extract components
4169    let u_mat = svd.u.unwrap();
4170    let vt_mat = svd.v_t.unwrap();
4171    let sigma = svd.singular_values;
4172
4173    // Convert to flat vectors
4174    let u: Vec<f64> = u_mat.iter().cloned().collect();
4175    let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
4176    let vt: Vec<f64> = vt_mat.iter().cloned().collect();
4177
4178    (u, sigma_vec, vt)
4179}
4180
4181/// Auto-detect trend and seasonal component groupings.
4182fn auto_group_ssa_components(
4183    u: &[f64],
4184    sigma: &[f64],
4185    l: usize,
4186    _k: usize,
4187    n_comp: usize,
4188) -> (Vec<usize>, Vec<usize>, f64, f64) {
4189    let mut trend_idx = Vec::new();
4190    let mut seasonal_idx = Vec::new();
4191    let mut detected_period = 0.0;
4192    let mut confidence = 0.0;
4193
4194    // Analyze each component
4195    for i in 0..n_comp.min(sigma.len()) {
4196        // Extract i-th column of U (left singular vector)
4197        let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4198
4199        // Check if component is trend-like (monotonic or slowly varying)
4200        let is_trend = is_trend_component(&u_col);
4201
4202        // Check if component is periodic
4203        let (is_periodic, period) = is_periodic_component(&u_col);
4204
4205        if is_trend && trend_idx.len() < 2 {
4206            trend_idx.push(i);
4207        } else if is_periodic {
4208            seasonal_idx.push(i);
4209            if detected_period == 0.0 && period > 0.0 {
4210                detected_period = period;
4211                confidence = sigma[i] / sigma[0].max(1e-15);
4212            }
4213        }
4214    }
4215
4216    // If no trend detected, use first component
4217    if trend_idx.is_empty() && n_comp > 0 {
4218        trend_idx.push(0);
4219    }
4220
4221    // If no seasonal detected, use components 2-3 (often harmonic pairs)
4222    if seasonal_idx.is_empty() && n_comp >= 3 {
4223        seasonal_idx.push(1);
4224        if n_comp > 2 {
4225            seasonal_idx.push(2);
4226        }
4227    }
4228
4229    (trend_idx, seasonal_idx, detected_period, confidence)
4230}
4231
/// Heuristic: a left singular vector is "trend-like" when it crosses zero
/// rarely (at most n/10 strict sign changes). A product of exactly zero
/// does not count as a crossing.
fn is_trend_component(u_col: &[f64]) -> bool {
    let n = u_col.len();
    if n < 3 {
        return false;
    }

    let crossings = u_col
        .windows(2)
        .filter(|pair| pair[0] * pair[1] < 0.0)
        .count();

    crossings <= n / 10
}
4250
/// Heuristic periodicity test for a left singular vector.
///
/// Computes the (biased) autocorrelation of the mean-centered vector at
/// lags 2..n/2 and reports the lag with the strongest value, provided it
/// exceeds 0.3. Returns `(is_periodic, period_in_samples)`.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    // Center the vector before correlating.
    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();

    let energy: f64 = centered.iter().map(|&x| x * x).sum();
    if energy < 1e-15 {
        // Effectively constant vector: no oscillation to detect.
        return (false, 0.0);
    }

    // Track (strongest acf, its lag) over the candidate lag range.
    let mut best = (0.0f64, 0.0f64);
    for lag in 2..n / 2 {
        let acf: f64 = centered
            .iter()
            .zip(&centered[lag..])
            .map(|(&a, &b)| a * b)
            .sum::<f64>()
            / energy;
        if acf > best.0 && acf > 0.3 {
            best = (acf, lag as f64);
        }
    }

    (best.0 > 0.3 && best.1 > 0.0, best.1)
}
4287
/// Reconstruct time series from grouped components via diagonal averaging.
///
/// Sums the rank-1 terms `sigma[idx] * u_idx * v_idx^T` for every index in
/// `group_idx` (out-of-range indices are skipped), then Hankelizes the
/// resulting L x K matrix back into a length-`n` series.
fn reconstruct_grouped(
    u: &[f64],
    sigma: &[f64],
    vt: &[f64],
    l: usize,
    k: usize,
    n: usize,
    group_idx: &[usize],
) -> Vec<f64> {
    // An empty group reconstructs the zero series.
    if group_idx.is_empty() {
        return vec![0.0; n];
    }

    // Sum of rank-1 matrices for this group (L x K, column-major)
    let mut grouped_matrix = vec![0.0; l * k];

    for &idx in group_idx {
        if idx >= sigma.len() {
            continue;
        }

        let s = sigma[idx];

        // Add s * u_i * v_i^T
        for j in 0..k {
            for i in 0..l {
                let u_val = u[i + idx * l];
                // v_t is min(L,K) x K, column-major: element (idx, j) sits at
                // idx + j * nrows, where nrows = sigma.len().min(l) = min(L,K).
                let v_val = vt[idx + j * sigma.len().min(l)];
                grouped_matrix[i + j * l] += s * u_val * v_val;
            }
        }
    }

    // Diagonal averaging (Hankelization)
    diagonal_average(&grouped_matrix, l, k, n)
}
4325
/// Hankelization: average a column-major L x K matrix along its
/// anti-diagonals to recover a length-`n` series. Entry (i, j)
/// contributes to series position i + j; positions no entry reaches
/// stay zero.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut sums = vec![0.0; n];
    let mut hits = vec![0usize; n];

    // Accumulate every matrix entry onto its anti-diagonal position.
    for j in 0..k {
        for i in 0..l {
            let t = i + j;
            if t < n {
                sums[t] += matrix[i + j * l];
                hits[t] += 1;
            }
        }
    }

    // Divide each position by how many entries landed on it.
    sums.iter()
        .zip(&hits)
        .map(|(&s, &c)| if c > 0 { s / c as f64 } else { s })
        .collect()
}
4351
4352/// Compute SSA for functional data (multiple curves).
4353///
4354/// # Arguments
4355/// * `data` - Column-major matrix (n x m) of functional data
4356/// * `n` - Number of samples (rows)
4357/// * `m` - Number of evaluation points (columns)
4358/// * `window_length` - SSA window length. If None, auto-determined.
4359/// * `n_components` - Number of SSA components. Default: 10.
4360///
4361/// # Returns
4362/// `SsaResult` computed from the mean curve.
4363pub fn ssa_fdata(
4364    data: &[f64],
4365    n: usize,
4366    m: usize,
4367    window_length: Option<usize>,
4368    n_components: Option<usize>,
4369) -> SsaResult {
4370    // Compute mean curve
4371    let mean_curve = compute_mean_curve(data, n, m);
4372
4373    // Run SSA on mean curve
4374    ssa(&mean_curve, window_length, n_components, None, None)
4375}
4376
4377/// Detect seasonality using SSA.
4378///
4379/// # Arguments
4380/// * `values` - Time series values
4381/// * `window_length` - SSA window length
4382/// * `confidence_threshold` - Minimum confidence for positive detection
4383///
4384/// # Returns
4385/// Tuple of (is_seasonal, detected_period, confidence)
4386pub fn ssa_seasonality(
4387    values: &[f64],
4388    window_length: Option<usize>,
4389    confidence_threshold: Option<f64>,
4390) -> (bool, f64, f64) {
4391    let result = ssa(values, window_length, None, None, None);
4392
4393    let threshold = confidence_threshold.unwrap_or(0.1);
4394    let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4395
4396    (is_seasonal, result.detected_period, result.confidence)
4397}
4398
4399#[cfg(test)]
4400mod tests {
4401    use super::*;
4402    use std::f64::consts::PI;
4403
4404    fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> Vec<f64> {
4405        let mut data = vec![0.0; n * m];
4406        for i in 0..n {
4407            for j in 0..m {
4408                data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4409            }
4410        }
4411        data
4412    }
4413
    #[test]
    fn test_period_estimation_fft() {
        // Single sine curve of period 2.0, sampled at dt = 0.1 over [0, 19.9].
        let m = 200;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
        let period = 2.0;
        let data = generate_sine(1, m, period, &argvals);

        let estimate = estimate_period_fft(&data, 1, m, &argvals);
        // FFT bin spacing limits resolution; 0.2 tolerance accounts for it.
        assert!((estimate.period - period).abs() < 0.2);
        // Confidence is peak power relative to mean power, so > 1 means a
        // genuine spectral peak stands out.
        assert!(estimate.confidence > 1.0);
    }
4425
    #[test]
    fn test_peak_detection() {
        // Sine of period 2.0 sampled at dt = 0.1 over [0, 9.9]: one peak
        // every 2 time units.
        let m = 100;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
        let period = 2.0;
        let data = generate_sine(1, m, period, &argvals);

        // min_distance = 1.5 < period, so no true peak is suppressed.
        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);

        // Should find approximately 5 peaks (10 / 2)
        assert!(!result.peaks[0].is_empty());
        assert!((result.mean_period - period).abs() < 0.3);
    }
4439
    #[test]
    fn test_peak_detection_known_sine() {
        // Pure sine wave: sin(2*pi*t/2) on [0, 10]
        // Peaks occur at t = period/4 + k*period = 0.5, 2.5, 4.5, 6.5, 8.5
        let m = 200; // High resolution for accurate detection
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        // Defaults: no min-distance or prominence constraints.
        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        // Should find exactly 5 peaks
        assert_eq!(
            result.peaks[0].len(),
            5,
            "Expected 5 peaks, got {}. Peak times: {:?}",
            result.peaks[0].len(),
            result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
        );

        // Check peak locations are close to expected; grid spacing is
        // ~0.05, so 0.15 leaves room for discretization error.
        let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
        for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
            assert!(
                (peak.time - expected).abs() < 0.15,
                "Peak at {:.3} not close to expected {:.3}",
                peak.time,
                expected
            );
        }

        // Check mean period
        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4482
    #[test]
    fn test_peak_detection_with_min_distance() {
        // Same sine wave but with min_distance constraint
        let m = 200;
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        // min_distance = 1.5 should still find all 5 peaks (spacing = 2.0)
        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
        assert_eq!(
            result.peaks[0].len(),
            5,
            "With min_distance=1.5, expected 5 peaks, got {}",
            result.peaks[0].len()
        );

        // min_distance = 2.5 should find fewer peaks
        // (2.5 exceeds the true 2.0 spacing, forcing suppression of some)
        let result2 = detect_peaks(&data, 1, m, &argvals, Some(2.5), None, false, None);
        assert!(
            result2.peaks[0].len() < 5,
            "With min_distance=2.5, expected fewer than 5 peaks, got {}",
            result2.peaks[0].len()
        );
    }
4511
    #[test]
    fn test_peak_detection_period_1() {
        // Higher frequency: sin(2*pi*t/1) on [0, 10]
        // Peaks at t = 0.25, 1.25, 2.25, ..., 9.25 (10 peaks)
        let m = 400; // Higher resolution for higher frequency
        let period = 1.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        // Defaults: no min-distance or prominence constraints.
        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        // Should find 10 peaks
        assert_eq!(
            result.peaks[0].len(),
            10,
            "Expected 10 peaks, got {}",
            result.peaks[0].len()
        );

        // Check mean period
        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4542
4543    #[test]
4544    fn test_peak_detection_shifted_sine() {
4545        // Shifted sine: sin(2*pi*t/2) + 1 on [0, 10]
4546        // Same peak locations, just shifted up
4547        let m = 200;
4548        let period = 2.0;
4549        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4550        let data: Vec<f64> = argvals
4551            .iter()
4552            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
4553            .collect();
4554
4555        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4556
4557        // Should still find 5 peaks
4558        assert_eq!(
4559            result.peaks[0].len(),
4560            5,
4561            "Expected 5 peaks for shifted sine, got {}",
4562            result.peaks[0].len()
4563        );
4564
4565        // Peak values should be around 2.0 (max of sin + 1)
4566        for peak in &result.peaks[0] {
4567            assert!(
4568                (peak.value - 2.0).abs() < 0.05,
4569                "Peak value {:.3} not close to expected 2.0",
4570                peak.value
4571            );
4572        }
4573    }
4574
4575    #[test]
4576    fn test_peak_detection_prominence() {
4577        // Create signal with peaks of different heights
4578        // Large peaks at odd positions, small peaks at even positions
4579        let m = 200;
4580        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4581        let data: Vec<f64> = argvals
4582            .iter()
4583            .map(|&t| {
4584                let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
4585                // Add small ripples
4586                let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
4587                base + ripple
4588            })
4589            .collect();
4590
4591        // Without prominence filter, may find extra peaks from ripples
4592        let result_no_filter = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4593
4594        // With prominence filter, should only find major peaks
4595        let result_filtered = detect_peaks(&data, 1, m, &argvals, None, Some(0.5), false, None);
4596
4597        // Filtered should have fewer or equal peaks
4598        assert!(
4599            result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
4600            "Prominence filter should reduce peak count"
4601        );
4602    }
4603
4604    #[test]
4605    fn test_peak_detection_different_amplitudes() {
4606        // Test with various amplitudes: 0.5, 1.0, 2.0, 5.0
4607        let m = 200;
4608        let period = 2.0;
4609        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4610
4611        for amplitude in [0.5, 1.0, 2.0, 5.0] {
4612            let data: Vec<f64> = argvals
4613                .iter()
4614                .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
4615                .collect();
4616
4617            let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4618
4619            assert_eq!(
4620                result.peaks[0].len(),
4621                5,
4622                "Amplitude {} should still find 5 peaks",
4623                amplitude
4624            );
4625
4626            // Peak values should be close to amplitude
4627            for peak in &result.peaks[0] {
4628                assert!(
4629                    (peak.value - amplitude).abs() < 0.1,
4630                    "Peak value {:.3} should be close to amplitude {}",
4631                    peak.value,
4632                    amplitude
4633                );
4634            }
4635        }
4636    }
4637
4638    #[test]
4639    fn test_peak_detection_varying_frequency() {
4640        // Signal with varying frequency: chirp-like signal
4641        // Peaks get closer together over time
4642        let m = 400;
4643        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4644
4645        // Frequency increases linearly: f(t) = 0.5 + 0.1*t
4646        // Phase integral: phi(t) = 2*pi * (0.5*t + 0.05*t^2)
4647        let data: Vec<f64> = argvals
4648            .iter()
4649            .map(|&t| {
4650                let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4651                phase.sin()
4652            })
4653            .collect();
4654
4655        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4656
4657        // Should find multiple peaks with decreasing spacing
4658        assert!(
4659            result.peaks[0].len() >= 5,
4660            "Should find at least 5 peaks, got {}",
4661            result.peaks[0].len()
4662        );
4663
4664        // Verify inter-peak distances decrease over time
4665        let distances = &result.inter_peak_distances[0];
4666        if distances.len() >= 3 {
4667            // Later peaks should be closer than earlier peaks
4668            let early_avg = (distances[0] + distances[1]) / 2.0;
4669            let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4670            assert!(
4671                late_avg < early_avg,
4672                "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4673                early_avg,
4674                late_avg
4675            );
4676        }
4677    }
4678
4679    #[test]
4680    fn test_peak_detection_sum_of_sines() {
4681        // Sum of two sine waves with different periods creates non-uniform peak spacing
4682        // y = sin(2*pi*t/2) + 0.5*sin(2*pi*t/3)
4683        let m = 300;
4684        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4685
4686        let data: Vec<f64> = argvals
4687            .iter()
4688            .map(|&t| {
4689                (2.0 * std::f64::consts::PI * t / 2.0).sin()
4690                    + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4691            })
4692            .collect();
4693
4694        let result = detect_peaks(&data, 1, m, &argvals, Some(1.0), None, false, None);
4695
4696        // Should find peaks (exact count depends on interference pattern)
4697        assert!(
4698            result.peaks[0].len() >= 4,
4699            "Should find at least 4 peaks, got {}",
4700            result.peaks[0].len()
4701        );
4702
4703        // Inter-peak distances should vary
4704        let distances = &result.inter_peak_distances[0];
4705        if distances.len() >= 2 {
4706            let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4707            let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4708            assert!(
4709                max_dist > min_dist * 1.1,
4710                "Distances should vary: min={:.3}, max={:.3}",
4711                min_dist,
4712                max_dist
4713            );
4714        }
4715    }
4716
4717    #[test]
4718    fn test_seasonal_strength() {
4719        let m = 200;
4720        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4721        let period = 2.0;
4722        let data = generate_sine(1, m, period, &argvals);
4723
4724        let strength = seasonal_strength_variance(&data, 1, m, &argvals, period, 3);
4725        // Pure sine should have high seasonal strength
4726        assert!(strength > 0.8);
4727
4728        let strength_spectral = seasonal_strength_spectral(&data, 1, m, &argvals, period);
4729        assert!(strength_spectral > 0.5);
4730    }
4731
4732    #[test]
4733    fn test_instantaneous_period() {
4734        let m = 200;
4735        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4736        let period = 2.0;
4737        let data = generate_sine(1, m, period, &argvals);
4738
4739        let result = instantaneous_period(&data, 1, m, &argvals);
4740
4741        // Check that instantaneous period is close to true period (away from boundaries)
4742        let mid_period = result.period[m / 2];
4743        assert!(
4744            (mid_period - period).abs() < 0.5,
4745            "Expected period ~{}, got {}",
4746            period,
4747            mid_period
4748        );
4749    }
4750
4751    #[test]
4752    fn test_peak_timing_analysis() {
4753        // Generate 5 cycles of sine with period 2
4754        let m = 500;
4755        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4756        let period = 2.0;
4757        let data = generate_sine(1, m, period, &argvals);
4758
4759        let result = analyze_peak_timing(&data, 1, m, &argvals, period, Some(11));
4760
4761        // Should find approximately 5 peaks
4762        assert!(!result.peak_times.is_empty());
4763        // Normalized timing should be around 0.25 (peak of sin at π/2)
4764        assert!(result.mean_timing.is_finite());
4765        // Pure sine should have low timing variability
4766        assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4767    }
4768
4769    #[test]
4770    fn test_seasonality_classification() {
4771        let m = 400;
4772        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4773        let period = 2.0;
4774        let data = generate_sine(1, m, period, &argvals);
4775
4776        let result = classify_seasonality(&data, 1, m, &argvals, period, None, None);
4777
4778        assert!(result.is_seasonal);
4779        assert!(result.seasonal_strength > 0.5);
4780        assert!(matches!(
4781            result.classification,
4782            SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4783        ));
4784    }
4785
4786    #[test]
4787    fn test_otsu_threshold() {
4788        // Bimodal distribution: mix of low (0.1-0.2) and high (0.7-0.9) values
4789        let values = vec![
4790            0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4791        ];
4792
4793        let threshold = otsu_threshold(&values);
4794
4795        // Threshold should be between the two modes
4796        // Due to small sample size, Otsu's method may not find optimal threshold
4797        // Just verify it returns a reasonable value in the data range
4798        assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4799        assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4800    }
4801
4802    #[test]
4803    fn test_gcv_fourier_nbasis_selection() {
4804        let m = 100;
4805        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4806
4807        // Noisy sine wave
4808        let mut data = vec![0.0; m];
4809        for j in 0..m {
4810            data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4811        }
4812
4813        let nbasis = crate::basis::select_fourier_nbasis_gcv(&data, 1, m, &argvals, 5, 25);
4814
4815        // nbasis should be reasonable (between min and max)
4816        assert!(nbasis >= 5);
4817        assert!(nbasis <= 25);
4818    }
4819
    #[test]
    fn test_detect_multiple_periods() {
        let m = 400;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); // 0 to 20

        // Signal mixes two sines: period 2 (amplitude 1.0) and period 7
        // (amplitude 0.6).
        let period1 = 2.0;
        let period2 = 7.0;
        let mut data = vec![0.0; m];
        for j in 0..m {
            data[j] = (2.0 * PI * argvals[j] / period1).sin()
                + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
        }

        // Use higher min_strength threshold to properly stop after real periods
        let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);

        // Should detect at least the 2 true periods with these thresholds
        // (only a lower bound is asserted; spurious extras are tolerated).
        assert!(
            detected.len() >= 2,
            "Expected at least 2 periods, found {}",
            detected.len()
        );

        // Check that both periods were detected (order depends on amplitude)
        let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
        let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
        let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);

        assert!(
            has_period1,
            "Expected to find period ~{}, got {:?}",
            period1, periods
        );
        assert!(
            has_period2,
            "Expected to find period ~{}, got {:?}",
            period2, periods
        );

        // Verify first detected has higher amplitude (amplitude 1.0 vs 0.6);
        // indexing [1] is safe because len >= 2 was asserted above.
        assert!(
            detected[0].amplitude > detected[1].amplitude,
            "First detected should have higher amplitude"
        );

        // Each detected period should carry positive strength, confidence,
        // and amplitude metadata.
        for d in &detected {
            assert!(
                d.strength > 0.0,
                "Detected period should have positive strength"
            );
            assert!(
                d.confidence > 0.0,
                "Detected period should have positive confidence"
            );
            assert!(
                d.amplitude > 0.0,
                "Detected period should have positive amplitude"
            );
        }
    }
4882
4883    // ========================================================================
4884    // Amplitude Modulation Detection Tests
4885    // ========================================================================
4886
4887    #[test]
4888    fn test_amplitude_modulation_stable() {
4889        // Constant amplitude seasonal signal - should detect as Stable
4890        let m = 200;
4891        let period = 0.2;
4892        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4893
4894        // Constant amplitude sine wave
4895        let data: Vec<f64> = argvals
4896            .iter()
4897            .map(|&t| (2.0 * PI * t / period).sin())
4898            .collect();
4899
4900        let result = detect_amplitude_modulation(
4901            &data, 1, m, &argvals, period, 0.15, // modulation threshold
4902            0.3,  // seasonality threshold
4903        );
4904
4905        eprintln!(
4906            "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4907            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4908        );
4909
4910        assert!(result.is_seasonal, "Should detect seasonality");
4911        assert!(
4912            !result.has_modulation,
4913            "Constant amplitude should not have modulation, got score={:.4}",
4914            result.modulation_score
4915        );
4916        assert_eq!(
4917            result.modulation_type,
4918            ModulationType::Stable,
4919            "Should be classified as Stable"
4920        );
4921    }
4922
4923    #[test]
4924    fn test_amplitude_modulation_emerging() {
4925        // Amplitude increases over time (emerging seasonality)
4926        let m = 200;
4927        let period = 0.2;
4928        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4929
4930        // Amplitude grows from 0.2 to 1.0
4931        let data: Vec<f64> = argvals
4932            .iter()
4933            .map(|&t| {
4934                let amplitude = 0.2 + 0.8 * t; // Linear increase
4935                amplitude * (2.0 * PI * t / period).sin()
4936            })
4937            .collect();
4938
4939        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
4940
4941        eprintln!(
4942            "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4943            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4944        );
4945
4946        assert!(result.is_seasonal, "Should detect seasonality");
4947        assert!(
4948            result.has_modulation,
4949            "Growing amplitude should have modulation, score={:.4}",
4950            result.modulation_score
4951        );
4952        assert_eq!(
4953            result.modulation_type,
4954            ModulationType::Emerging,
4955            "Should be classified as Emerging, trend={:.4}",
4956            result.amplitude_trend
4957        );
4958        assert!(
4959            result.amplitude_trend > 0.0,
4960            "Trend should be positive for emerging"
4961        );
4962    }
4963
4964    #[test]
4965    fn test_amplitude_modulation_fading() {
4966        // Amplitude decreases over time (fading seasonality)
4967        let m = 200;
4968        let period = 0.2;
4969        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4970
4971        // Amplitude decreases from 1.0 to 0.2
4972        let data: Vec<f64> = argvals
4973            .iter()
4974            .map(|&t| {
4975                let amplitude = 1.0 - 0.8 * t; // Linear decrease
4976                amplitude * (2.0 * PI * t / period).sin()
4977            })
4978            .collect();
4979
4980        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
4981
4982        eprintln!(
4983            "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
4984            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
4985        );
4986
4987        assert!(result.is_seasonal, "Should detect seasonality");
4988        assert!(
4989            result.has_modulation,
4990            "Fading amplitude should have modulation"
4991        );
4992        assert_eq!(
4993            result.modulation_type,
4994            ModulationType::Fading,
4995            "Should be classified as Fading, trend={:.4}",
4996            result.amplitude_trend
4997        );
4998        assert!(
4999            result.amplitude_trend < 0.0,
5000            "Trend should be negative for fading"
5001        );
5002    }
5003
5004    #[test]
5005    fn test_amplitude_modulation_oscillating() {
5006        // Amplitude oscillates (neither purely emerging nor fading)
5007        let m = 200;
5008        let period = 0.1;
5009        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5010
5011        // Amplitude oscillates: high-low-high-low pattern
5012        let data: Vec<f64> = argvals
5013            .iter()
5014            .map(|&t| {
5015                let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); // 2 modulation cycles
5016                amplitude * (2.0 * PI * t / period).sin()
5017            })
5018            .collect();
5019
5020        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5021
5022        eprintln!(
5023            "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5024            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5025        );
5026
5027        assert!(result.is_seasonal, "Should detect seasonality");
5028        // Oscillating has high variation but near-zero trend
5029        if result.has_modulation {
5030            // Trend should be near zero for oscillating
5031            assert!(
5032                result.amplitude_trend.abs() < 0.5,
5033                "Trend should be small for oscillating"
5034            );
5035        }
5036    }
5037
5038    #[test]
5039    fn test_amplitude_modulation_non_seasonal() {
5040        // Pure noise - no seasonality
5041        let m = 100;
5042        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5043
5044        // Random noise (use simple pseudo-random)
5045        let data: Vec<f64> = (0..m)
5046            .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5047            .collect();
5048
5049        let result = detect_amplitude_modulation(
5050            &data, 1, m, &argvals, 0.2, // arbitrary period
5051            0.15, 0.3,
5052        );
5053
5054        assert!(
5055            !result.is_seasonal,
5056            "Noise should not be detected as seasonal"
5057        );
5058        assert_eq!(
5059            result.modulation_type,
5060            ModulationType::NonSeasonal,
5061            "Should be classified as NonSeasonal"
5062        );
5063    }
5064
5065    // ========================================================================
5066    // Wavelet-based Amplitude Modulation Detection Tests
5067    // ========================================================================
5068
5069    #[test]
5070    fn test_wavelet_amplitude_modulation_stable() {
5071        let m = 200;
5072        let period = 0.2;
5073        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5074
5075        let data: Vec<f64> = argvals
5076            .iter()
5077            .map(|&t| (2.0 * PI * t / period).sin())
5078            .collect();
5079
5080        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.3);
5081
5082        eprintln!(
5083            "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5084            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5085        );
5086
5087        assert!(result.is_seasonal, "Should detect seasonality");
5088        assert!(
5089            !result.has_modulation,
5090            "Constant amplitude should not have modulation, got score={:.4}",
5091            result.modulation_score
5092        );
5093    }
5094
5095    #[test]
5096    fn test_wavelet_amplitude_modulation_emerging() {
5097        let m = 200;
5098        let period = 0.2;
5099        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5100
5101        // Amplitude grows from 0.2 to 1.0
5102        let data: Vec<f64> = argvals
5103            .iter()
5104            .map(|&t| {
5105                let amplitude = 0.2 + 0.8 * t;
5106                amplitude * (2.0 * PI * t / period).sin()
5107            })
5108            .collect();
5109
5110        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5111
5112        eprintln!(
5113            "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5114            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5115        );
5116
5117        assert!(result.is_seasonal, "Should detect seasonality");
5118        assert!(
5119            result.has_modulation,
5120            "Growing amplitude should have modulation"
5121        );
5122        assert!(
5123            result.amplitude_trend > 0.0,
5124            "Trend should be positive for emerging"
5125        );
5126    }
5127
5128    #[test]
5129    fn test_wavelet_amplitude_modulation_fading() {
5130        let m = 200;
5131        let period = 0.2;
5132        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5133
5134        // Amplitude decreases from 1.0 to 0.2
5135        let data: Vec<f64> = argvals
5136            .iter()
5137            .map(|&t| {
5138                let amplitude = 1.0 - 0.8 * t;
5139                amplitude * (2.0 * PI * t / period).sin()
5140            })
5141            .collect();
5142
5143        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5144
5145        eprintln!(
5146            "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5147            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5148        );
5149
5150        assert!(result.is_seasonal, "Should detect seasonality");
5151        assert!(
5152            result.has_modulation,
5153            "Fading amplitude should have modulation"
5154        );
5155        assert!(
5156            result.amplitude_trend < 0.0,
5157            "Trend should be negative for fading"
5158        );
5159    }
5160
5161    #[test]
5162    fn test_seasonal_strength_wavelet() {
5163        let m = 200;
5164        let period = 0.2;
5165        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5166
5167        // Pure sine wave at target period - should have high strength
5168        let seasonal_data: Vec<f64> = argvals
5169            .iter()
5170            .map(|&t| (2.0 * PI * t / period).sin())
5171            .collect();
5172
5173        let strength = seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period);
5174        eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5175        assert!(
5176            strength > 0.5,
5177            "Pure sine should have high wavelet strength"
5178        );
5179
5180        // Pure noise - should have low strength
5181        let noise_data: Vec<f64> = (0..m)
5182            .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5183            .collect();
5184
5185        let noise_strength = seasonal_strength_wavelet(&noise_data, 1, m, &argvals, period);
5186        eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5187        assert!(
5188            noise_strength < 0.3,
5189            "Noise should have low wavelet strength"
5190        );
5191
5192        // Wrong period - should have lower strength
5193        let wrong_period_strength =
5194            seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period * 2.0);
5195        eprintln!(
5196            "Wavelet strength (wrong period): {:.4}",
5197            wrong_period_strength
5198        );
5199        assert!(
5200            wrong_period_strength < strength,
5201            "Wrong period should have lower strength"
5202        );
5203    }
5204
5205    #[test]
5206    fn test_compute_mean_curve() {
5207        // 2 samples, 3 time points
5208        // Sample 1: [1, 2, 3]
5209        // Sample 2: [2, 4, 6]
5210        // Mean: [1.5, 3, 4.5]
5211        let data = vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0]; // column-major
5212        let mean = compute_mean_curve(&data, 2, 3);
5213        assert_eq!(mean.len(), 3);
5214        assert!((mean[0] - 1.5).abs() < 1e-10);
5215        assert!((mean[1] - 3.0).abs() < 1e-10);
5216        assert!((mean[2] - 4.5).abs() < 1e-10);
5217    }
5218
5219    #[test]
5220    fn test_compute_mean_curve_parallel_consistency() {
5221        // Test that parallel and sequential give same results
5222        let n = 10;
5223        let m = 200;
5224        let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5225
5226        let seq_result = compute_mean_curve_impl(&data, n, m, false);
5227        let par_result = compute_mean_curve_impl(&data, n, m, true);
5228
5229        assert_eq!(seq_result.len(), par_result.len());
5230        for (s, p) in seq_result.iter().zip(par_result.iter()) {
5231            assert!(
5232                (s - p).abs() < 1e-10,
5233                "Sequential and parallel results differ"
5234            );
5235        }
5236    }
5237
5238    #[test]
5239    fn test_interior_bounds() {
5240        // m = 100: edge_skip = 10, interior = [10, 90)
5241        let bounds = interior_bounds(100);
5242        assert!(bounds.is_some());
5243        let (start, end) = bounds.unwrap();
5244        assert_eq!(start, 10);
5245        assert_eq!(end, 90);
5246
5247        // m = 10: edge_skip = 1, but min(1, 2) = 1, max(9, 7) = 9
5248        let bounds = interior_bounds(10);
5249        assert!(bounds.is_some());
5250        let (start, end) = bounds.unwrap();
5251        assert!(start < end);
5252
5253        // Very small m might not have valid interior
5254        let bounds = interior_bounds(2);
5255        // Should still return something as long as end > start
5256        assert!(bounds.is_some() || bounds.is_none());
5257    }
5258
5259    #[test]
5260    fn test_hilbert_transform_pure_sine() {
5261        // Hilbert transform of sin(t) should give cos(t) in imaginary part
5262        let m = 200;
5263        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5264        let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();
5265
5266        let analytic = hilbert_transform(&signal);
5267        assert_eq!(analytic.len(), m);
5268
5269        // Check amplitude is approximately 1
5270        for c in analytic.iter().skip(10).take(m - 20) {
5271            let amp = c.norm();
5272            assert!(
5273                (amp - 1.0).abs() < 0.1,
5274                "Amplitude should be ~1, got {}",
5275                amp
5276            );
5277        }
5278    }
5279
5280    #[test]
5281    fn test_sazed_pure_sine() {
5282        // Pure sine wave with known period
5283        let m = 200;
5284        let period = 2.0;
5285        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5286        let data: Vec<f64> = argvals
5287            .iter()
5288            .map(|&t| (2.0 * PI * t / period).sin())
5289            .collect();
5290
5291        let result = sazed(&data, &argvals, None);
5292
5293        assert!(result.period.is_finite(), "SAZED should detect a period");
5294        assert!(
5295            (result.period - period).abs() < 0.3,
5296            "Expected period ~{}, got {}",
5297            period,
5298            result.period
5299        );
5300        assert!(
5301            result.confidence > 0.4,
5302            "Expected confidence > 0.4, got {}",
5303            result.confidence
5304        );
5305        assert!(
5306            result.agreeing_components >= 2,
5307            "Expected at least 2 agreeing components, got {}",
5308            result.agreeing_components
5309        );
5310    }
5311
5312    #[test]
5313    fn test_sazed_noisy_sine() {
5314        // Sine wave with noise
5315        let m = 300;
5316        let period = 3.0;
5317        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5318
5319        // Deterministic pseudo-noise using sin with different frequency
5320        let data: Vec<f64> = argvals
5321            .iter()
5322            .enumerate()
5323            .map(|(i, &t)| {
5324                let signal = (2.0 * PI * t / period).sin();
5325                let noise = 0.1 * (17.3 * i as f64).sin();
5326                signal + noise
5327            })
5328            .collect();
5329
5330        let result = sazed(&data, &argvals, Some(0.15));
5331
5332        assert!(
5333            result.period.is_finite(),
5334            "SAZED should detect a period even with noise"
5335        );
5336        assert!(
5337            (result.period - period).abs() < 0.5,
5338            "Expected period ~{}, got {}",
5339            period,
5340            result.period
5341        );
5342    }
5343
5344    #[test]
5345    fn test_sazed_fdata() {
5346        // Multiple samples with same period
5347        let n = 5;
5348        let m = 200;
5349        let period = 2.0;
5350        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5351        let data = generate_sine(n, m, period, &argvals);
5352
5353        let result = sazed_fdata(&data, n, m, &argvals, None);
5354
5355        assert!(result.period.is_finite(), "SAZED should detect a period");
5356        assert!(
5357            (result.period - period).abs() < 0.3,
5358            "Expected period ~{}, got {}",
5359            period,
5360            result.period
5361        );
5362    }
5363
5364    #[test]
5365    fn test_sazed_short_series() {
5366        // Very short series - should return NaN gracefully
5367        let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5368        let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5369
5370        let result = sazed(&data, &argvals, None);
5371
5372        // Should handle gracefully (return NaN for too-short series)
5373        assert!(
5374            result.period.is_nan() || result.period.is_finite(),
5375            "Should return NaN or valid period"
5376        );
5377    }
5378
5379    #[test]
5380    fn test_autoperiod_pure_sine() {
5381        // Pure sine wave with known period
5382        let m = 200;
5383        let period = 2.0;
5384        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5385        let data: Vec<f64> = argvals
5386            .iter()
5387            .map(|&t| (2.0 * PI * t / period).sin())
5388            .collect();
5389
5390        let result = autoperiod(&data, &argvals, None, None);
5391
5392        assert!(
5393            result.period.is_finite(),
5394            "Autoperiod should detect a period"
5395        );
5396        assert!(
5397            (result.period - period).abs() < 0.3,
5398            "Expected period ~{}, got {}",
5399            period,
5400            result.period
5401        );
5402        assert!(
5403            result.confidence > 0.0,
5404            "Expected positive confidence, got {}",
5405            result.confidence
5406        );
5407    }
5408
5409    #[test]
5410    fn test_autoperiod_with_trend() {
5411        // Sine wave with linear trend
5412        let m = 300;
5413        let period = 3.0;
5414        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5415        let data: Vec<f64> = argvals
5416            .iter()
5417            .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5418            .collect();
5419
5420        let result = autoperiod(&data, &argvals, None, None);
5421
5422        assert!(
5423            result.period.is_finite(),
5424            "Autoperiod should detect a period"
5425        );
5426        // Allow more tolerance with trend
5427        assert!(
5428            (result.period - period).abs() < 0.5,
5429            "Expected period ~{}, got {}",
5430            period,
5431            result.period
5432        );
5433    }
5434
5435    #[test]
5436    fn test_autoperiod_candidates() {
5437        // Verify candidates are generated
5438        let m = 200;
5439        let period = 2.0;
5440        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5441        let data: Vec<f64> = argvals
5442            .iter()
5443            .map(|&t| (2.0 * PI * t / period).sin())
5444            .collect();
5445
5446        let result = autoperiod(&data, &argvals, Some(5), Some(10));
5447
5448        assert!(
5449            !result.candidates.is_empty(),
5450            "Should have at least one candidate"
5451        );
5452
5453        // Best candidate should have highest combined score
5454        let max_score = result
5455            .candidates
5456            .iter()
5457            .map(|c| c.combined_score)
5458            .fold(f64::NEG_INFINITY, f64::max);
5459        assert!(
5460            (result.confidence - max_score).abs() < 1e-10,
5461            "Returned confidence should match best candidate's score"
5462        );
5463    }
5464
5465    #[test]
5466    fn test_autoperiod_fdata() {
5467        // Multiple samples with same period
5468        let n = 5;
5469        let m = 200;
5470        let period = 2.0;
5471        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5472        let data = generate_sine(n, m, period, &argvals);
5473
5474        let result = autoperiod_fdata(&data, n, m, &argvals, None, None);
5475
5476        assert!(
5477            result.period.is_finite(),
5478            "Autoperiod should detect a period"
5479        );
5480        assert!(
5481            (result.period - period).abs() < 0.3,
5482            "Expected period ~{}, got {}",
5483            period,
5484            result.period
5485        );
5486    }
5487
5488    #[test]
5489    fn test_cfd_autoperiod_pure_sine() {
5490        // Pure sine wave with known period
5491        let m = 200;
5492        let period = 2.0;
5493        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5494        let data: Vec<f64> = argvals
5495            .iter()
5496            .map(|&t| (2.0 * PI * t / period).sin())
5497            .collect();
5498
5499        let result = cfd_autoperiod(&data, &argvals, None, None);
5500
5501        assert!(
5502            result.period.is_finite(),
5503            "CFDAutoperiod should detect a period"
5504        );
5505        assert!(
5506            (result.period - period).abs() < 0.3,
5507            "Expected period ~{}, got {}",
5508            period,
5509            result.period
5510        );
5511    }
5512
5513    #[test]
5514    fn test_cfd_autoperiod_with_trend() {
5515        // Sine wave with strong linear trend - CFD excels here
5516        let m = 300;
5517        let period = 3.0;
5518        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5519        let data: Vec<f64> = argvals
5520            .iter()
5521            .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5522            .collect();
5523
5524        let result = cfd_autoperiod(&data, &argvals, None, None);
5525
5526        assert!(
5527            result.period.is_finite(),
5528            "CFDAutoperiod should detect a period despite trend"
5529        );
5530        // Allow more tolerance since trend can affect detection
5531        assert!(
5532            (result.period - period).abs() < 0.6,
5533            "Expected period ~{}, got {}",
5534            period,
5535            result.period
5536        );
5537    }
5538
5539    #[test]
5540    fn test_cfd_autoperiod_multiple_periods() {
5541        // Signal with two periods - should detect multiple
5542        let m = 400;
5543        let period1 = 2.0;
5544        let period2 = 5.0;
5545        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5546        let data: Vec<f64> = argvals
5547            .iter()
5548            .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5549            .collect();
5550
5551        let result = cfd_autoperiod(&data, &argvals, None, None);
5552
5553        assert!(
5554            !result.periods.is_empty(),
5555            "Should detect at least one period"
5556        );
5557        // The primary period should be one of the two
5558        let close_to_p1 = (result.period - period1).abs() < 0.5;
5559        let close_to_p2 = (result.period - period2).abs() < 1.0;
5560        assert!(
5561            close_to_p1 || close_to_p2,
5562            "Primary period {} not close to {} or {}",
5563            result.period,
5564            period1,
5565            period2
5566        );
5567    }
5568
5569    #[test]
5570    fn test_cfd_autoperiod_fdata() {
5571        // Multiple samples with same period
5572        let n = 5;
5573        let m = 200;
5574        let period = 2.0;
5575        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5576        let data = generate_sine(n, m, period, &argvals);
5577
5578        let result = cfd_autoperiod_fdata(&data, n, m, &argvals, None, None);
5579
5580        assert!(
5581            result.period.is_finite(),
5582            "CFDAutoperiod should detect a period"
5583        );
5584        assert!(
5585            (result.period - period).abs() < 0.3,
5586            "Expected period ~{}, got {}",
5587            period,
5588            result.period
5589        );
5590    }
5591}