//! Seasonal time series analysis for functional data.
//!
//! This module provides functions for analyzing seasonal patterns in functional data:
//! - Period estimation (FFT, autocorrelation, regression-based)
//! - Peak detection with prominence calculation
//! - Seasonal strength measurement (variance and spectral methods)
//! - Seasonality change detection (onset/cessation)
//! - Instantaneous period estimation for drifting seasonality
//! - Peak timing variability analysis for short series
//! - Seasonality classification

12use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::matrix::FdMatrix;
15use crate::{iter_maybe_parallel, slice_maybe_parallel};
16use num_complex::Complex;
17#[cfg(feature = "parallel")]
18use rayon::iter::ParallelIterator;
19use rustfft::FftPlanner;
20use std::f64::consts::PI;
21
/// Result of period estimation.
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period
    pub period: f64,
    /// Dominant frequency (1/period)
    pub frequency: f64,
    /// Power at the dominant frequency
    pub power: f64,
    /// Confidence measure (ratio of peak power to mean power; larger means a
    /// clearer spectral peak)
    pub confidence: f64,
}
34
/// A detected peak in functional data.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Time at which the peak occurs
    pub time: f64,
    /// Value at the peak
    pub value: f64,
    /// Prominence of the peak (height relative to surrounding valleys)
    pub prominence: f64,
}
45
/// Result of peak detection.
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Peaks for each sample: `peaks[sample_idx]` contains peaks for that sample
    pub peaks: Vec<Vec<Peak>>,
    /// Inter-peak distances for each sample (time differences between
    /// consecutive peaks)
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean period estimated from inter-peak distances across all samples
    pub mean_period: f64,
}
56
/// A detected period from multiple period detection.
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Estimated period
    pub period: f64,
    /// FFT confidence (ratio of peak power to mean power)
    pub confidence: f64,
    /// Seasonal strength at this period (variance explained)
    pub strength: f64,
    /// Amplitude of the fitted sinusoidal component
    pub amplitude: f64,
    /// Phase of the fitted sinusoidal component (radians)
    pub phase: f64,
    /// Iteration number at which this period was extracted (1-indexed)
    pub iteration: usize,
}
73
/// Method for computing seasonal strength.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Variance decomposition: Var(seasonal) / Var(total)
    Variance,
    /// Spectral: power at seasonal frequencies / total power
    Spectral,
}
82
/// A detected change point in seasonality.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time at which the change occurs
    pub time: f64,
    /// Type of change (onset or cessation)
    pub change_type: ChangeType,
    /// Seasonal strength just before the change
    pub strength_before: f64,
    /// Seasonal strength just after the change
    pub strength_after: f64,
}
95
/// Type of seasonality change.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Series becomes seasonal
    Onset,
    /// Series stops being seasonal
    Cessation,
}
104
/// Result of seasonality change detection.
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Detected change points
    pub change_points: Vec<ChangePoint>,
    /// Time-varying seasonal strength curve used for detection
    pub strength_curve: Vec<f64>,
}
113
/// Result of instantaneous period estimation.
///
/// All three vectors are aligned with the input time grid.
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each time point
    pub period: Vec<f64>,
    /// Instantaneous frequency at each time point
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (envelope) at each time point
    pub amplitude: Vec<f64>,
}
124
/// Result of peak timing variability analysis.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Peak times for each cycle
    pub peak_times: Vec<f64>,
    /// Peak values
    pub peak_values: Vec<f64>,
    /// Within-period timing (0-1 scale, e.g., day-of-year / 365)
    pub normalized_timing: Vec<f64>,
    /// Mean normalized timing
    pub mean_timing: f64,
    /// Standard deviation of normalized timing
    pub std_timing: f64,
    /// Range of normalized timing (max - min)
    pub range_timing: f64,
    /// Variability score (0 = stable, 1 = highly variable)
    pub variability_score: f64,
    /// Trend in timing (positive = peaks getting later)
    pub timing_trend: f64,
    /// Cycle indices (1-indexed)
    pub cycle_indices: Vec<usize>,
}
147
/// Type of seasonality pattern.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Regular peaks with consistent timing
    StableSeasonal,
    /// Regular peaks but timing shifts between cycles
    VariableTiming,
    /// Some cycles seasonal, some not
    IntermittentSeasonal,
    /// No clear seasonality
    NonSeasonal,
}
160
/// Result of seasonality classification.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Whether the series is seasonal overall
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles
    pub has_stable_timing: bool,
    /// Timing variability score (0 = stable, 1 = highly variable)
    pub timing_variability: f64,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Per-cycle seasonal strength
    pub cycle_strengths: Vec<f64>,
    /// Indices of weak/missing seasons (0-indexed)
    pub weak_seasons: Vec<usize>,
    /// Classification type
    pub classification: SeasonalType,
    /// Peak timing analysis (`None` when no peaks were detected)
    pub peak_timing: Option<PeakTimingResult>,
}
181
/// Method for automatic threshold selection.
///
/// Derives `PartialEq` only (no `Eq`): two variants carry `f64` payloads.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Fixed user-specified threshold
    Fixed(f64),
    /// Percentile of strength distribution
    Percentile(f64),
    /// Otsu's method (optimal bimodal separation)
    Otsu,
}
192
/// Type of amplitude modulation pattern.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Constant amplitude (no modulation)
    Stable,
    /// Amplitude increases over time (seasonality emerges)
    Emerging,
    /// Amplitude decreases over time (seasonality fades)
    Fading,
    /// Amplitude varies non-monotonically
    Oscillating,
    /// No seasonality detected
    NonSeasonal,
}
207
/// Result of amplitude modulation detection.
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether seasonality is present (using robust spectral method)
    pub is_seasonal: bool,
    /// Overall seasonal strength (spectral method)
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of time-varying strength (0 = stable, higher = more modulation)
    pub modulation_score: f64,
    /// Trend in amplitude (-1 to 1: negative = fading, positive = emerging)
    pub amplitude_trend: f64,
    /// Time-varying seasonal strength curve
    pub strength_curve: Vec<f64>,
    /// Time points corresponding to `strength_curve`
    pub time_points: Vec<f64>,
    /// Minimum strength in the curve
    pub min_strength: f64,
    /// Maximum strength in the curve
    pub max_strength: f64,
}
232
/// Result of wavelet-based amplitude modulation detection.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether seasonality is present
    pub is_seasonal: bool,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of wavelet amplitude
    pub modulation_score: f64,
    /// Trend in amplitude (-1 to 1)
    pub amplitude_trend: f64,
    /// Wavelet amplitude at the seasonal frequency over time
    pub wavelet_amplitude: Vec<f64>,
    /// Time points corresponding to `wavelet_amplitude`
    pub time_points: Vec<f64>,
    /// Scale (period) used for wavelet analysis
    pub scale: f64,
}
255
256// ============================================================================
257// Internal helper functions
258// ============================================================================
259
260/// Compute mean curve from functional data matrix.
261///
262/// # Arguments
263/// * `data` - Functional data matrix (n x m)
264/// * `parallel` - Use parallel iteration (default: true)
265///
266/// # Returns
267/// Mean curve of length m
268#[inline]
269fn compute_mean_curve_impl(data: &FdMatrix, parallel: bool) -> Vec<f64> {
270    let (n, m) = data.shape();
271    if parallel && m >= 100 {
272        // Use parallel iteration for larger datasets
273        iter_maybe_parallel!(0..m)
274            .map(|j| {
275                let mut sum = 0.0;
276                for i in 0..n {
277                    sum += data[(i, j)];
278                }
279                sum / n as f64
280            })
281            .collect()
282    } else {
283        // Sequential for small datasets or when disabled
284        (0..m)
285            .map(|j| {
286                let mut sum = 0.0;
287                for i in 0..n {
288                    sum += data[(i, j)];
289                }
290                sum / n as f64
291            })
292            .collect()
293    }
294}
295
/// Compute mean curve with the default policy: parallel iteration is enabled
/// and kicks in for m >= 100 grid points (see `compute_mean_curve_impl`).
#[inline]
fn compute_mean_curve(data: &FdMatrix) -> Vec<f64> {
    compute_mean_curve_impl(data, true)
}
301
/// Compute interior bounds for edge-skipping (10% on each side).
///
/// Used to avoid edge effects in wavelet and other analyses. The skip is
/// capped at m/4 on the left and the interior is forced to reach at least
/// 3m/4 on the right, so most of the range is always retained.
///
/// # Arguments
/// * `m` - Total number of points
///
/// # Returns
/// `(interior_start, interior_end)` indices, or `None` if the range collapses
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let skip = (m as f64 * 0.1) as usize;
    let start = skip.min(m / 4);
    let end = m.saturating_sub(skip).max(m * 3 / 4);
    (end > start).then_some((start, end))
}
323
324/// Validate interior bounds with minimum span requirement.
325fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
326    interior_bounds(m).filter(|&(s, e)| e > s + min_span)
327}
328
329/// Compute periodogram from data using FFT.
330/// Returns (frequencies, power) where frequencies are in cycles per unit time.
331fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
332    let n = data.len();
333    if n < 2 || argvals.len() != n {
334        return (Vec::new(), Vec::new());
335    }
336
337    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
338    let fs = 1.0 / dt; // Sampling frequency
339
340    let mut planner = FftPlanner::<f64>::new();
341    let fft = planner.plan_fft_forward(n);
342
343    let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
344    fft.process(&mut buffer);
345
346    // Compute power spectrum (one-sided)
347    let n_freq = n / 2 + 1;
348    let mut power = Vec::with_capacity(n_freq);
349    let mut frequencies = Vec::with_capacity(n_freq);
350
351    for k in 0..n_freq {
352        let freq = k as f64 * fs / n as f64;
353        frequencies.push(freq);
354
355        let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
356        // Double power for non-DC and non-Nyquist frequencies (one-sided spectrum)
357        let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
358        power.push(p);
359    }
360
361    (frequencies, power)
362}
363
/// Compute autocorrelation function for lags `0..=min(max_lag, n-1)`.
///
/// Uses the biased estimator (normalized by `n * var`), which is standard for
/// spectral work. A (near-)constant series is treated as perfectly
/// self-correlated at every lag. The lag clamp now happens before the
/// constant-series shortcut, so both paths return the same number of lags
/// (previously the shortcut could return one extra element).
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    // Clamp once, up front, so every return path agrees on the output length.
    let max_lag = max_lag.min(n - 1);

    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    if var < 1e-15 {
        // Degenerate (constant) series: ACF is 1 everywhere by convention.
        return vec![1.0; max_lag + 1];
    }

    let mut acf = Vec::with_capacity(max_lag + 1);
    for lag in 0..=max_lag {
        let mut sum = 0.0;
        for i in 0..(n - lag) {
            sum += (data[i] - mean) * (data[i + lag] - mean);
        }
        acf.push(sum / (n as f64 * var));
    }

    acf
}
391
/// Try to add a peak, respecting minimum distance.
///
/// If `candidate` is at least `min_distance` past the last accepted peak it is
/// appended; if it is too close, it replaces the last peak only when it is
/// higher, otherwise it is dropped. (The previous implementation used
/// `last_mut().unwrap_or(&mut 0)`, which would silently write into a
/// throwaway temporary if the vec were empty; this version updates in place
/// through `last_mut()` directly.)
fn try_add_peak(peaks: &mut Vec<usize>, candidate: usize, signal: &[f64], min_distance: usize) {
    match peaks.last_mut() {
        Some(last) if candidate - *last < min_distance => {
            // Too close to the previous peak: keep whichever is higher.
            if signal[candidate] > signal[*last] {
                *last = candidate;
            }
        }
        _ => peaks.push(candidate),
    }
}
405
406/// Find peaks in a 1D signal, returning indices.
407fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
408    let n = signal.len();
409    if n < 3 {
410        return Vec::new();
411    }
412
413    let mut peaks = Vec::new();
414
415    for i in 1..(n - 1) {
416        if signal[i] > signal[i - 1] && signal[i] > signal[i + 1] {
417            try_add_peak(&mut peaks, i, signal, min_distance);
418        }
419    }
420
421    peaks
422}
423
/// Compute prominence for a peak: its height above the higher of the two
/// lowest points reached before the signal climbs back to the peak's height
/// (or before a boundary) on either side.
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Walk outward from the peak, stopping when the signal rises back to the
    // peak height, and track the lowest value seen along the way.
    let side_min = |indices: &mut dyn Iterator<Item = usize>| -> f64 {
        let mut lowest = peak_val;
        for i in indices {
            if signal[i] >= peak_val {
                break;
            }
            lowest = lowest.min(signal[i]);
        }
        lowest
    };

    let left_min = side_min(&mut (0..peak_idx).rev());
    let right_min = side_min(&mut ((peak_idx + 1)..signal.len()));

    peak_val - left_min.max(right_min)
}
448
449/// Hilbert transform using FFT to compute analytic signal.
450///
451/// # Arguments
452/// * `signal` - Input real signal
453///
454/// # Returns
455/// Analytic signal as complex vector (real part = original, imaginary = Hilbert transform)
456pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
457    let n = signal.len();
458    if n == 0 {
459        return Vec::new();
460    }
461
462    let mut planner = FftPlanner::<f64>::new();
463    let fft_forward = planner.plan_fft_forward(n);
464    let fft_inverse = planner.plan_fft_inverse(n);
465
466    // Forward FFT
467    let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
468    fft_forward.process(&mut buffer);
469
470    // Create analytic signal in frequency domain
471    // H[0] = 1, H[1..n/2] = 2, H[n/2] = 1 (if n even), H[n/2+1..] = 0
472    let half = n / 2;
473    for k in 1..half {
474        buffer[k] *= 2.0;
475    }
476    for k in (half + 1)..n {
477        buffer[k] = Complex::new(0.0, 0.0);
478    }
479
480    // Inverse FFT
481    fft_inverse.process(&mut buffer);
482
483    // Normalize
484    for c in buffer.iter_mut() {
485        *c /= n as f64;
486    }
487
488    buffer
489}
490
/// Unwrap phase to remove 2π discontinuities.
///
/// Whenever consecutive samples jump by more than π in magnitude, a multiple
/// of 2π is folded into all subsequent samples so the output is continuous.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    let Some(&first) = phase.first() else {
        return Vec::new();
    };

    let mut out = Vec::with_capacity(phase.len());
    out.push(first);

    let mut offset = 0.0;
    let mut prev = first;
    for &p in &phase[1..] {
        let step = p - prev;
        if step > PI {
            offset -= 2.0 * PI;
        } else if step < -PI {
            offset += 2.0 * PI;
        }
        out.push(p + offset);
        prev = p;
    }

    out
}
515
516/// Morlet wavelet function.
517///
518/// The Morlet wavelet is a complex exponential modulated by a Gaussian:
519/// ψ(t) = exp(i * ω₀ * t) * exp(-t² / 2)
520///
521/// where ω₀ is the central frequency (typically 6 for good time-frequency trade-off).
522fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
523    let gaussian = (-t * t / 2.0).exp();
524    let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
525    oscillation * gaussian
526}
527
528/// Continuous Wavelet Transform at a single scale using Morlet wavelet.
529///
530/// Computes the wavelet coefficients at the specified scale (period) for all time points.
531/// Uses convolution in the time domain.
532///
533/// # Arguments
534/// * `signal` - Input signal
535/// * `argvals` - Time points
536/// * `scale` - Scale parameter (related to period)
537/// * `omega0` - Central frequency of Morlet wavelet (default: 6.0)
538///
539/// # Returns
540/// Complex wavelet coefficients at each time point
541#[allow(dead_code)]
542fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
543    let n = signal.len();
544    if n == 0 || scale <= 0.0 {
545        return Vec::new();
546    }
547
548    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
549
550    // Compute wavelet coefficients via convolution
551    // W(a, b) = (1/sqrt(a)) * Σ x[k] * ψ*((t[k] - b) / a) * dt
552    let norm = 1.0 / scale.sqrt();
553
554    (0..n)
555        .map(|b| {
556            let mut sum = Complex::new(0.0, 0.0);
557            for k in 0..n {
558                let t_normalized = (argvals[k] - argvals[b]) / scale;
559                // Only compute within reasonable range (Gaussian decays quickly)
560                if t_normalized.abs() < 6.0 {
561                    let wavelet = morlet_wavelet(t_normalized, omega0);
562                    sum += signal[k] * wavelet.conj();
563                }
564            }
565            sum * norm * dt
566        })
567        .collect()
568}
569
/// Continuous Wavelet Transform at a single scale using FFT (faster for large signals).
///
/// Uses the convolution theorem: CWT = IFFT(FFT(signal) * conj(FFT(wavelet))).
///
/// # Arguments
/// * `signal` - Input signal
/// * `argvals` - Time points (assumed approximately uniformly spaced)
/// * `scale` - Scale parameter (related to period); must be > 0
/// * `omega0` - Central frequency of the Morlet wavelet
///
/// # Returns
/// Complex wavelet coefficients at each time point (empty on invalid input)
fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    // Average grid spacing; 1/sqrt(scale) keeps energy comparable across scales.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let norm = 1.0 / scale.sqrt();

    // Sample the wavelet "centered at t = 0" in FFT order: indices past n/2
    // represent negative times (wrap-around), which matches the circular
    // convolution the FFT implements.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            // Center the wavelet
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();

    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);

    // FFT of signal
    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);

    // FFT of wavelet
    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);

    // Multiply in frequency domain; conjugating the wavelet spectrum turns
    // the convolution into a correlation (the CWT inner product).
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();

    // Inverse FFT
    fft_inverse.process(&mut result);

    // Scale by dt (Riemann-sum weight) and 1/n (rustfft's inverse transform
    // is unnormalized).
    for c in result.iter_mut() {
        *c *= dt / n as f64;
    }

    result
}
624
625/// Compute Otsu's threshold for bimodal separation.
626///
627/// Finds the threshold that minimizes within-class variance (or equivalently
628/// maximizes between-class variance).
629fn otsu_threshold(values: &[f64]) -> f64 {
630    if values.is_empty() {
631        return 0.5;
632    }
633
634    // Filter NaN values
635    let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
636    if valid.is_empty() {
637        return 0.5;
638    }
639
640    let vmin = valid.iter().cloned().fold(f64::INFINITY, f64::min);
641    let vmax = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
642
643    if (vmax - vmin).abs() < 1e-10 {
644        return (vmin + vmax) / 2.0;
645    }
646
647    let n_bins = 256;
648    let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
649    let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
650
651    hist_min + (best_bin as f64 + 0.5) * bin_width
652}
653
/// Compute simple OLS regression slope of y on x.
///
/// Returns 0.0 for mismatched lengths, fewer than two points, or a
/// degenerate (constant) x.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    // Accumulate covariance numerator and x-variance denominator in one pass.
    let (num, den) = x
        .iter()
        .zip(y.iter())
        .fold((0.0, 0.0), |(num, den), (&xi, &yi)| {
            let dx = xi - mean_x;
            (num + dx * (yi - mean_y), den + dx * dx)
        });

    if den.abs() < 1e-15 {
        0.0
    } else {
        num / den
    }
}
678
/// Statistics from amplitude envelope analysis (shared by Hilbert and wavelet methods).
struct AmplitudeEnvelopeStats {
    // Coefficient of variation of the envelope (std / mean).
    modulation_score: f64,
    // Normalized linear trend of the envelope over the window, clamped to [-1, 1].
    amplitude_trend: f64,
    // True when modulation_score exceeds the caller-supplied threshold.
    has_modulation: bool,
    // Pattern classified from has_modulation and amplitude_trend.
    modulation_type: ModulationType,
    // Mean envelope amplitude (underscore-prefixed: not read by current consumers).
    _mean_amp: f64,
    // Minimum envelope amplitude over the analyzed interior.
    min_amp: f64,
    // Maximum envelope amplitude over the analyzed interior.
    max_amp: f64,
}
689
/// Analyze an amplitude envelope slice to compute modulation statistics.
///
/// # Arguments
/// * `interior_envelope` - Envelope values (edge regions already trimmed)
/// * `interior_times` - Time points matching `interior_envelope`
/// * `modulation_threshold` - Coefficient-of-variation cutoff above which the
///   envelope counts as modulated
///
/// NOTE(review): assumes both slices are non-empty and of equal length; an
/// empty slice yields NaN statistics — confirm callers guard via the
/// interior-bounds helpers.
fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    let n_interior = interior_envelope.len() as f64;

    // Envelope summary statistics.
    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    // Modulation score is the coefficient of variation (std / mean), guarded
    // against a near-zero mean amplitude.
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Inline OLS slope of amplitude against time (trend of the envelope).
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Convert the slope to total relative change over the window and clamp to
    // [-1, 1] so trends are comparable across series.
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    let has_modulation = modulation_score > modulation_threshold;
    // Classification: a clear positive/negative trend means emerging/fading
    // seasonality; modulation without a trend is non-monotonic (oscillating).
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
757
/// Fit a sinusoid `a·cos(ωt) + b·sin(ωt)` at the given period and subtract it
/// from `residual` in place.
///
/// Coefficients come from the discrete projection onto the cos/sin pair.
/// Returns `(a, b, amplitude, phase)` with `amplitude = sqrt(a² + b²)` and
/// `phase = atan2(b, a)`.
fn fit_and_subtract_sinusoid(
    residual: &mut [f64],
    argvals: &[f64],
    period: f64,
) -> (f64, f64, f64, f64) {
    let m = residual.len() as f64;
    let omega = 2.0 * PI / period;

    // Project the residual onto the cosine and sine components.
    let (cos_sum, sin_sum) = argvals.iter().zip(residual.iter()).fold(
        (0.0, 0.0),
        |(c, s), (&t, &r)| (c + r * (omega * t).cos(), s + r * (omega * t).sin()),
    );

    let a = 2.0 * cos_sum / m;
    let b = 2.0 * sin_sum / m;
    let amplitude = (a * a + b * b).sqrt();
    let phase = b.atan2(a);

    // Remove the fitted component in place.
    for (r, &t) in residual.iter_mut().zip(argvals.iter()) {
        *r -= a * (omega * t).cos() + b * (omega * t).sin();
    }

    (a, b, amplitude, phase)
}
785
/// Validate a single SAZED component.
///
/// Returns `Some(period)` when the period is finite, strictly inside
/// `(min_period, max_period)`, and backed by a confidence above `threshold`;
/// `None` otherwise.
fn validate_sazed_component(
    period: f64,
    confidence: f64,
    min_period: f64,
    max_period: f64,
    threshold: f64,
) -> Option<f64> {
    let in_range = period.is_finite() && period > min_period && period < max_period;
    (in_range && confidence > threshold).then_some(period)
}
800
/// Count how many periods agree with `reference` within relative `tolerance`,
/// returning `(count, sum_of_agreeing_periods)`.
///
/// Agreement uses a symmetric relative difference: |ref − p| / max(ref, p).
fn count_agreeing_periods(periods: &[f64], reference: f64, tolerance: f64) -> (usize, f64) {
    periods
        .iter()
        .filter(|&&p| (reference - p).abs() / reference.max(p) <= tolerance)
        .fold((0, 0.0), |(count, sum), &p| (count + 1, sum + p))
}
814
/// Find the end of the initial ACF descent: the first lag with a negative
/// value, or the lag just before the first uptick. Falls back to 1.
fn find_acf_descent_end(acf: &[f64]) -> usize {
    for (i, &v) in acf.iter().enumerate().skip(1) {
        if v < 0.0 {
            return i;
        }
        // An uptick (only meaningful from lag 2 on) ends the descent one
        // step earlier.
        if i > 1 && v > acf[i - 1] {
            return i - 1;
        }
    }
    1
}
827
828/// Find the first ACF peak after initial descent. Returns Some((lag, acf_value)).
829fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
830    if acf.len() < 4 {
831        return None;
832    }
833
834    let min_search_start = find_acf_descent_end(acf);
835    let peaks = find_peaks_1d(&acf[min_search_start..], 1);
836    if peaks.is_empty() {
837        return None;
838    }
839
840    let peak_lag = peaks[0] + min_search_start;
841    Some((peak_lag, acf[peak_lag].max(0.0)))
842}
843
/// Compute per-cycle seasonal strengths and identify weak seasons.
///
/// Partitions the time axis into consecutive windows of length `period`
/// starting at `argvals[0]`, computes the variance-based seasonal strength
/// within each complete window, and flags cycles whose strength falls below
/// `strength_thresh`.
///
/// # Returns
/// `(cycle_strengths, weak_seasons)` — strength is NaN for cycles with fewer
/// than 4 grid points; `weak_seasons` holds 0-indexed cycle numbers.
///
/// NOTE(review): assumes `argvals` is non-empty (indexes `argvals[0]`) and
/// sorted ascending — confirm at call sites.
fn compute_cycle_strengths(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    let (n, m) = data.shape();
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    // Only complete cycles are evaluated; a trailing partial cycle is ignored.
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // Grid indices covering [cycle_start, cycle_end].
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        if cycle_m < 4 {
            // Too few points for a meaningful strength estimate.
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Extract the window as a fresh column-major n x cycle_m matrix.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[(i, j)]))
            .collect();
        // Safe: cycle_data has exactly n * cycle_m entries by construction.
        let cycle_mat = FdMatrix::from_column_major(cycle_data, n, cycle_m).unwrap();
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        // Fixed choice of 3 harmonics for the within-cycle strength fit.
        let strength = seasonal_strength_variance(&cycle_mat, &cycle_argvals, period, 3);

        cycle_strengths.push(strength);
        if strength < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    (cycle_strengths, weak_seasons)
}
888
/// Build an `n_bins` histogram over `valid`. Returns (histogram, min, bin_width).
///
/// Callers must ensure `valid` is non-empty, finite, and has positive spread;
/// otherwise `bin_width` degenerates to 0 (or the bounds stay infinite).
fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
    let lo = valid.iter().cloned().fold(f64::INFINITY, f64::min);
    let hi = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
    let bin_width = (hi - lo) / n_bins as f64;

    let mut histogram = vec![0usize; n_bins];
    for &v in valid {
        // Clamp so the maximum value lands in the last bin rather than n_bins.
        let bin = ((v - lo) / bin_width).min(n_bins as f64 - 1.0) as usize;
        histogram[bin] += 1;
    }

    (histogram, lo, bin_width)
}
901
/// Find the histogram bin that maximizes Otsu's between-class variance.
/// Returns `(best_bin, best_variance)`.
fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
    // Total first moment of the histogram (bin index weighted by count).
    let sum_total: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &count)| i as f64 * count as f64)
        .sum();

    let mut best = (0usize, 0.0f64);
    let mut weight_b = 0.0; // mass of the "background" class (bins <= t)
    let mut sum_b = 0.0; // first moment of the background class

    for (t, &count) in histogram.iter().enumerate() {
        weight_b += count as f64;
        if weight_b == 0.0 {
            continue; // no mass below this threshold yet
        }
        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            break; // all mass below the threshold; further splits are degenerate
        }
        sum_b += t as f64 * count as f64;
        let mean_b = sum_b / weight_b;
        let mean_f = (sum_total - sum_b) / weight_f;
        let between_class = weight_b * weight_f * (mean_b - mean_f).powi(2);
        if between_class > best.1 {
            best = (t, between_class);
        }
    }

    best
}
936
/// Sum spectral power at (approximate) integer harmonics of `fundamental_freq`.
///
/// The DC bin (index 0) is excluded from both sums. Returns
/// `(seasonal_power, total_power)` where `total_power` covers all non-DC bins
/// and `seasonal_power` only those within `tolerance` of a harmonic >= 1.
fn sum_harmonic_power(
    frequencies: &[f64],
    power: &[f64],
    fundamental_freq: f64,
    tolerance: f64,
) -> (f64, f64) {
    let mut seasonal_power = 0.0;
    let mut total_power = 0.0;

    // skip(1) drops the DC component.
    for (&freq, &p) in frequencies.iter().zip(power.iter()).skip(1) {
        total_power += p;
        let harmonic = freq / fundamental_freq;
        let nearest = harmonic.round();
        if nearest >= 1.0 && (harmonic - nearest).abs() < tolerance {
            seasonal_power += p;
        }
    }

    (seasonal_power, total_power)
}
961
/// Return the new seasonal state if `ss` represents a valid threshold
/// crossing at index `i`, or `None` if the index should be skipped (NaN
/// strength, no state change, or too close to the previous change point).
fn crossing_direction(
    ss: f64,
    threshold: f64,
    in_seasonal: bool,
    i: usize,
    last_change_idx: Option<usize>,
    min_dur_points: usize,
) -> Option<bool> {
    if ss.is_nan() {
        return None;
    }

    let now_seasonal = ss > threshold;
    let state_changed = now_seasonal != in_seasonal;
    // Enforce a minimum duration between consecutive change points.
    let far_enough = !last_change_idx.is_some_and(|last| i - last < min_dur_points);

    (state_changed && far_enough).then_some(now_seasonal)
}
985
986/// Build a `ChangePoint` for a threshold crossing at index `i`.
987fn build_change_point(
988    i: usize,
989    ss: f64,
990    now_seasonal: bool,
991    strength_curve: &[f64],
992    argvals: &[f64],
993) -> ChangePoint {
994    let change_type = if now_seasonal {
995        ChangeType::Onset
996    } else {
997        ChangeType::Cessation
998    };
999    let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
1000        strength_curve[i - 1]
1001    } else {
1002        ss
1003    };
1004    ChangePoint {
1005        time: argvals[i],
1006        change_type,
1007        strength_before,
1008        strength_after: ss,
1009    }
1010}
1011
1012/// Detect threshold crossings in a strength curve, returning change points.
1013fn detect_threshold_crossings(
1014    strength_curve: &[f64],
1015    argvals: &[f64],
1016    threshold: f64,
1017    min_dur_points: usize,
1018) -> Vec<ChangePoint> {
1019    let mut change_points = Vec::new();
1020    let mut in_seasonal = strength_curve[0] > threshold;
1021    let mut last_change_idx: Option<usize> = None;
1022
1023    for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
1024        let Some(now_seasonal) = crossing_direction(
1025            ss,
1026            threshold,
1027            in_seasonal,
1028            i,
1029            last_change_idx,
1030            min_dur_points,
1031        ) else {
1032            continue;
1033        };
1034
1035        change_points.push(build_change_point(
1036            i,
1037            ss,
1038            now_seasonal,
1039            strength_curve,
1040            argvals,
1041        ));
1042
1043        in_seasonal = now_seasonal;
1044        last_change_idx = Some(i);
1045    }
1046
1047    change_points
1048}
1049
1050// ============================================================================
1051// Period Estimation
1052// ============================================================================
1053
1054/// Estimate period using FFT periodogram.
1055///
1056/// Finds the dominant frequency in the periodogram (excluding DC) and
1057/// returns the corresponding period.
1058///
1059/// # Arguments
1060/// * `data` - Column-major matrix (n x m)
1061/// * `n` - Number of samples
1062/// * `m` - Number of evaluation points
1063/// * `argvals` - Evaluation points (time values)
1064///
1065/// # Returns
1066/// Period estimate with confidence measure
1067pub fn estimate_period_fft(data: &FdMatrix, argvals: &[f64]) -> PeriodEstimate {
1068    let (n, m) = data.shape();
1069    if n == 0 || m < 4 || argvals.len() != m {
1070        return PeriodEstimate {
1071            period: f64::NAN,
1072            frequency: f64::NAN,
1073            power: 0.0,
1074            confidence: 0.0,
1075        };
1076    }
1077
1078    // Compute mean curve first
1079    let mean_curve = compute_mean_curve(data);
1080
1081    let (frequencies, power) = periodogram(&mean_curve, argvals);
1082
1083    if frequencies.len() < 2 {
1084        return PeriodEstimate {
1085            period: f64::NAN,
1086            frequency: f64::NAN,
1087            power: 0.0,
1088            confidence: 0.0,
1089        };
1090    }
1091
1092    // Find peak in power spectrum (skip DC component at index 0)
1093    let mut max_power = 0.0;
1094    let mut max_idx = 1;
1095    for (i, &p) in power.iter().enumerate().skip(1) {
1096        if p > max_power {
1097            max_power = p;
1098            max_idx = i;
1099        }
1100    }
1101
1102    let dominant_freq = frequencies[max_idx];
1103    let period = if dominant_freq > 1e-15 {
1104        1.0 / dominant_freq
1105    } else {
1106        f64::INFINITY
1107    };
1108
1109    // Confidence: ratio of peak power to mean power (excluding DC)
1110    let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
1111    let confidence = if mean_power > 1e-15 {
1112        max_power / mean_power
1113    } else {
1114        0.0
1115    };
1116
1117    PeriodEstimate {
1118        period,
1119        frequency: dominant_freq,
1120        power: max_power,
1121        confidence,
1122    }
1123}
1124
1125/// Estimate period using autocorrelation function.
1126///
1127/// Finds the first significant peak in the ACF after lag 0.
1128///
1129/// # Arguments
1130/// * `data` - Column-major matrix (n x m)
1131/// * `n` - Number of samples
1132/// * `m` - Number of evaluation points
1133/// * `argvals` - Evaluation points
1134/// * `max_lag` - Maximum lag to consider (in number of points)
1135pub fn estimate_period_acf(
1136    data: &[f64],
1137    n: usize,
1138    m: usize,
1139    argvals: &[f64],
1140    max_lag: usize,
1141) -> PeriodEstimate {
1142    if n == 0 || m < 4 || argvals.len() != m {
1143        return PeriodEstimate {
1144            period: f64::NAN,
1145            frequency: f64::NAN,
1146            power: 0.0,
1147            confidence: 0.0,
1148        };
1149    }
1150
1151    // Compute mean curve
1152    let mat = FdMatrix::from_slice(data, n, m).unwrap();
1153    let mean_curve = compute_mean_curve(&mat);
1154
1155    let acf = autocorrelation(&mean_curve, max_lag);
1156
1157    // Find first peak after lag 0 (skip first few lags to avoid finding lag 0)
1158    let min_lag = 2;
1159    let peaks = find_peaks_1d(&acf[min_lag..], 1);
1160
1161    if peaks.is_empty() {
1162        return PeriodEstimate {
1163            period: f64::NAN,
1164            frequency: f64::NAN,
1165            power: 0.0,
1166            confidence: 0.0,
1167        };
1168    }
1169
1170    let peak_lag = peaks[0] + min_lag;
1171    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1172    let period = peak_lag as f64 * dt;
1173    let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };
1174
1175    PeriodEstimate {
1176        period,
1177        frequency,
1178        power: acf[peak_lag],
1179        confidence: acf[peak_lag].abs(),
1180    }
1181}
1182
1183/// Estimate period via Fourier regression grid search.
1184///
1185/// Tests multiple candidate periods and selects the one that minimizes
1186/// the reconstruction error (similar to GCV).
1187///
1188/// # Arguments
1189/// * `data` - Column-major matrix (n x m)
1190/// * `n` - Number of samples
1191/// * `m` - Number of evaluation points
1192/// * `argvals` - Evaluation points
1193/// * `period_min` - Minimum period to test
1194/// * `period_max` - Maximum period to test
1195/// * `n_candidates` - Number of candidate periods to test
1196/// * `n_harmonics` - Number of Fourier harmonics to use
1197pub fn estimate_period_regression(
1198    data: &[f64],
1199    n: usize,
1200    m: usize,
1201    argvals: &[f64],
1202    period_min: f64,
1203    period_max: f64,
1204    n_candidates: usize,
1205    n_harmonics: usize,
1206) -> PeriodEstimate {
1207    if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
1208        return PeriodEstimate {
1209            period: f64::NAN,
1210            frequency: f64::NAN,
1211            power: 0.0,
1212            confidence: 0.0,
1213        };
1214    }
1215
1216    // Compute mean curve
1217    let mat = FdMatrix::from_slice(data, n, m).unwrap();
1218    let mean_curve = compute_mean_curve(&mat);
1219
1220    let nbasis = 1 + 2 * n_harmonics;
1221
1222    // Grid search over candidate periods
1223    let candidates: Vec<f64> = (0..n_candidates)
1224        .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
1225        .collect();
1226
1227    let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
1228        .map(|&period| {
1229            let basis = fourier_basis_with_period(argvals, nbasis, period);
1230
1231            // Simple least squares fit
1232            let mut rss = 0.0;
1233            for j in 0..m {
1234                let mut fitted = 0.0;
1235                // Simple: use mean of basis function times data as rough fit
1236                for k in 0..nbasis {
1237                    let b_val = basis[j + k * m];
1238                    let coef: f64 = (0..m)
1239                        .map(|l| mean_curve[l] * basis[l + k * m])
1240                        .sum::<f64>()
1241                        / (0..m)
1242                            .map(|l| basis[l + k * m].powi(2))
1243                            .sum::<f64>()
1244                            .max(1e-15);
1245                    fitted += coef * b_val;
1246                }
1247                let resid = mean_curve[j] - fitted;
1248                rss += resid * resid;
1249            }
1250
1251            (period, rss)
1252        })
1253        .collect();
1254
1255    // Find period with minimum RSS
1256    let (best_period, min_rss) = results
1257        .iter()
1258        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
1259        .cloned()
1260        .unwrap_or((f64::NAN, f64::INFINITY));
1261
1262    // Confidence based on how much better the best is vs average
1263    let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
1264    let confidence = if min_rss > 1e-15 {
1265        (mean_rss / min_rss).min(10.0)
1266    } else {
1267        10.0
1268    };
1269
1270    PeriodEstimate {
1271        period: best_period,
1272        frequency: if best_period > 1e-15 {
1273            1.0 / best_period
1274        } else {
1275            0.0
1276        },
1277        power: 1.0 - min_rss / mean_rss,
1278        confidence,
1279    }
1280}
1281
1282/// Detect multiple concurrent periodicities using iterative residual subtraction.
1283///
1284/// This function iteratively:
1285/// 1. Estimates the dominant period using FFT
1286/// 2. Checks both FFT confidence and seasonal strength as stopping criteria
1287/// 3. Computes the amplitude and phase of the sinusoidal component
1288/// 4. Subtracts the fitted sinusoid from the signal
1289/// 5. Repeats on the residual until stopping criteria are met
1290///
1291/// # Arguments
1292/// * `data` - Column-major matrix (n x m)
1293/// * `n` - Number of samples
1294/// * `m` - Number of evaluation points
1295/// * `argvals` - Evaluation points
1296/// * `max_periods` - Maximum number of periods to detect
1297/// * `min_confidence` - Minimum FFT confidence to continue (default: 0.4)
1298/// * `min_strength` - Minimum seasonal strength to continue (default: 0.15)
1299///
1300/// # Returns
1301/// Vector of detected periods with their properties
1302pub fn detect_multiple_periods(
1303    data: &[f64],
1304    n: usize,
1305    m: usize,
1306    argvals: &[f64],
1307    max_periods: usize,
1308    min_confidence: f64,
1309    min_strength: f64,
1310) -> Vec<DetectedPeriod> {
1311    if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
1312        return Vec::new();
1313    }
1314
1315    // Compute mean curve
1316    let mat = FdMatrix::from_slice(data, n, m).unwrap();
1317    let mean_curve = compute_mean_curve(&mat);
1318
1319    let mut residual = mean_curve.clone();
1320    let mut detected = Vec::with_capacity(max_periods);
1321
1322    for iteration in 1..=max_periods {
1323        match evaluate_next_period(
1324            &mut residual,
1325            m,
1326            argvals,
1327            min_confidence,
1328            min_strength,
1329            iteration,
1330        ) {
1331            Some(period) => detected.push(period),
1332            None => break,
1333        }
1334    }
1335
1336    detected
1337}
1338
1339/// Evaluate and extract the next dominant period from the residual signal.
1340///
1341/// Returns `None` if no significant period is found (signals iteration should stop).
1342fn evaluate_next_period(
1343    residual: &mut [f64],
1344    m: usize,
1345    argvals: &[f64],
1346    min_confidence: f64,
1347    min_strength: f64,
1348    iteration: usize,
1349) -> Option<DetectedPeriod> {
1350    let residual_mat = FdMatrix::from_slice(residual, 1, m).unwrap();
1351    let est = estimate_period_fft(&residual_mat, argvals);
1352
1353    if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
1354        return None;
1355    }
1356
1357    let strength = seasonal_strength_variance(&residual_mat, argvals, est.period, 3);
1358    if strength < min_strength || strength.is_nan() {
1359        return None;
1360    }
1361
1362    let (_a, _b, amplitude, phase) = fit_and_subtract_sinusoid(residual, argvals, est.period);
1363
1364    Some(DetectedPeriod {
1365        period: est.period,
1366        confidence: est.confidence,
1367        strength,
1368        amplitude,
1369        phase,
1370        iteration,
1371    })
1372}
1373
1374// ============================================================================
1375// Peak Detection
1376// ============================================================================
1377
1378/// Optionally smooth data using Fourier basis before peak detection.
1379fn smooth_for_peaks(
1380    data: &FdMatrix,
1381    argvals: &[f64],
1382    smooth_first: bool,
1383    smooth_nbasis: Option<usize>,
1384) -> Vec<f64> {
1385    if !smooth_first {
1386        return data.as_slice().to_vec();
1387    }
1388    let nbasis = smooth_nbasis
1389        .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, argvals, 5, 25));
1390    if let Some(result) = crate::basis::fourier_fit_1d(data, argvals, nbasis) {
1391        result.fitted.into_vec()
1392    } else {
1393        data.as_slice().to_vec()
1394    }
1395}
1396
1397/// Detect peaks in a single curve using derivative zero-crossings.
1398fn detect_peaks_single_curve(
1399    curve: &[f64],
1400    d1: &[f64],
1401    argvals: &[f64],
1402    min_dist_points: usize,
1403    min_prominence: Option<f64>,
1404    data_range: f64,
1405) -> (Vec<Peak>, Vec<f64>) {
1406    let m = curve.len();
1407    let mut peak_indices = Vec::new();
1408    for j in 1..m {
1409        if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
1410            let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
1411                j - 1
1412            } else {
1413                j
1414            };
1415
1416            if peak_indices.is_empty()
1417                || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
1418            {
1419                peak_indices.push(idx);
1420            }
1421        }
1422    }
1423
1424    let mut peaks: Vec<Peak> = peak_indices
1425        .iter()
1426        .map(|&idx| {
1427            let prominence = compute_prominence(curve, idx) / data_range;
1428            Peak {
1429                time: argvals[idx],
1430                value: curve[idx],
1431                prominence,
1432            }
1433        })
1434        .collect();
1435
1436    if let Some(min_prom) = min_prominence {
1437        peaks.retain(|p| p.prominence >= min_prom);
1438    }
1439
1440    let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();
1441
1442    (peaks, distances)
1443}
1444
1445/// Detect peaks in functional data.
1446///
1447/// Uses derivative zero-crossings to find local maxima, with optional
1448/// Fourier basis smoothing and filtering by minimum distance and prominence.
1449///
1450/// # Arguments
1451/// * `data` - Column-major matrix (n x m)
1452/// * `n` - Number of samples
1453/// * `m` - Number of evaluation points
1454/// * `argvals` - Evaluation points
1455/// * `min_distance` - Minimum time between peaks (None = no constraint)
1456/// * `min_prominence` - Minimum prominence (0-1 scale, None = no filter)
1457/// * `smooth_first` - Whether to smooth data before peak detection using Fourier basis
1458/// * `smooth_nbasis` - Number of Fourier basis functions. If None and smooth_first=true,
1459///   uses GCV to automatically select optimal nbasis (range 5-25).
1460pub fn detect_peaks(
1461    data: &FdMatrix,
1462    argvals: &[f64],
1463    min_distance: Option<f64>,
1464    min_prominence: Option<f64>,
1465    smooth_first: bool,
1466    smooth_nbasis: Option<usize>,
1467) -> PeakDetectionResult {
1468    let (n, m) = data.shape();
1469    if n == 0 || m < 3 || argvals.len() != m {
1470        return PeakDetectionResult {
1471            peaks: Vec::new(),
1472            inter_peak_distances: Vec::new(),
1473            mean_period: f64::NAN,
1474        };
1475    }
1476
1477    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1478    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);
1479
1480    let work_data = smooth_for_peaks(data, argvals, smooth_first, smooth_nbasis);
1481
1482    // Compute first derivative
1483    let work_mat = FdMatrix::from_column_major(work_data.clone(), n, m).unwrap();
1484    let deriv1 = deriv_1d(&work_mat, argvals, 1).into_vec();
1485
1486    // Compute data range for prominence normalization
1487    let data_range: f64 = {
1488        let mut min_val = f64::INFINITY;
1489        let mut max_val = f64::NEG_INFINITY;
1490        for &v in work_data.iter() {
1491            min_val = min_val.min(v);
1492            max_val = max_val.max(v);
1493        }
1494        (max_val - min_val).max(1e-15)
1495    };
1496
1497    // Find peaks for each sample
1498    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
1499        .map(|i| {
1500            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
1501            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();
1502            detect_peaks_single_curve(
1503                &curve,
1504                &d1,
1505                argvals,
1506                min_dist_points,
1507                min_prominence,
1508                data_range,
1509            )
1510        })
1511        .collect();
1512
1513    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
1514    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();
1515
1516    // Compute mean period from all inter-peak distances
1517    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
1518    let mean_period = if all_distances.is_empty() {
1519        f64::NAN
1520    } else {
1521        all_distances.iter().sum::<f64>() / all_distances.len() as f64
1522    };
1523
1524    PeakDetectionResult {
1525        peaks,
1526        inter_peak_distances,
1527        mean_period,
1528    }
1529}
1530
1531// ============================================================================
1532// Seasonal Strength
1533// ============================================================================
1534
1535/// Measure seasonal strength using variance decomposition.
1536///
1537/// Computes SS = Var(seasonal_component) / Var(total) where the seasonal
1538/// component is extracted using Fourier basis.
1539///
1540/// # Arguments
1541/// * `data` - Column-major matrix (n x m)
1542/// * `n` - Number of samples
1543/// * `m` - Number of evaluation points
1544/// * `argvals` - Evaluation points
1545/// * `period` - Seasonal period
1546/// * `n_harmonics` - Number of Fourier harmonics to use
1547///
1548/// # Returns
1549/// Seasonal strength in [0, 1]
1550pub fn seasonal_strength_variance(
1551    data: &FdMatrix,
1552    argvals: &[f64],
1553    period: f64,
1554    n_harmonics: usize,
1555) -> f64 {
1556    let (n, m) = data.shape();
1557    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1558        return f64::NAN;
1559    }
1560
1561    // Compute mean curve
1562    let mean_curve = compute_mean_curve(data);
1563
1564    // Total variance
1565    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1566    let total_var: f64 = mean_curve
1567        .iter()
1568        .map(|&x| (x - global_mean).powi(2))
1569        .sum::<f64>()
1570        / m as f64;
1571
1572    if total_var < 1e-15 {
1573        return 0.0;
1574    }
1575
1576    // Fit Fourier basis to extract seasonal component
1577    let nbasis = 1 + 2 * n_harmonics;
1578    let basis = fourier_basis_with_period(argvals, nbasis, period);
1579
1580    // Project data onto basis (simple least squares for mean curve)
1581    let mut seasonal = vec![0.0; m];
1582    for k in 1..nbasis {
1583        // Skip DC component
1584        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
1585        if b_sum > 1e-15 {
1586            let coef: f64 = (0..m)
1587                .map(|j| mean_curve[j] * basis[j + k * m])
1588                .sum::<f64>()
1589                / b_sum;
1590            for j in 0..m {
1591                seasonal[j] += coef * basis[j + k * m];
1592            }
1593        }
1594    }
1595
1596    // Seasonal variance
1597    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
1598    let seasonal_var: f64 = seasonal
1599        .iter()
1600        .map(|&x| (x - seasonal_mean).powi(2))
1601        .sum::<f64>()
1602        / m as f64;
1603
1604    (seasonal_var / total_var).min(1.0)
1605}
1606
1607/// Measure seasonal strength using spectral method.
1608///
1609/// Computes SS = power at seasonal frequencies / total power.
1610///
1611/// # Arguments
1612/// * `data` - Column-major matrix (n x m)
1613/// * `n` - Number of samples
1614/// * `m` - Number of evaluation points
1615/// * `argvals` - Evaluation points
1616/// * `period` - Seasonal period
1617pub fn seasonal_strength_spectral(data: &FdMatrix, argvals: &[f64], period: f64) -> f64 {
1618    let (n, m) = data.shape();
1619    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1620        return f64::NAN;
1621    }
1622
1623    // Compute mean curve
1624    let mean_curve = compute_mean_curve(data);
1625
1626    let (frequencies, power) = periodogram(&mean_curve, argvals);
1627
1628    if frequencies.len() < 2 {
1629        return f64::NAN;
1630    }
1631
1632    let fundamental_freq = 1.0 / period;
1633    let (seasonal_power, total_power) =
1634        sum_harmonic_power(&frequencies, &power, fundamental_freq, 0.1);
1635
1636    if total_power < 1e-15 {
1637        return 0.0;
1638    }
1639
1640    (seasonal_power / total_power).min(1.0)
1641}
1642
1643/// Compute seasonal strength using Morlet wavelet power at the target period.
1644///
1645/// This method uses the Continuous Wavelet Transform (CWT) with a Morlet wavelet
1646/// to measure power at the specified seasonal period. Unlike spectral methods,
1647/// wavelets provide time-localized frequency information.
1648///
1649/// # Arguments
1650/// * `data` - Column-major matrix (n x m)
1651/// * `n` - Number of samples
1652/// * `m` - Number of evaluation points
1653/// * `argvals` - Evaluation points
1654/// * `period` - Seasonal period in argvals units
1655///
1656/// # Returns
1657/// Seasonal strength as ratio of wavelet power to total variance (0 to 1)
1658///
1659/// # Notes
1660/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
1661/// - Scale is computed as: scale = period * ω₀ / (2π)
1662/// - Strength is computed over the interior 80% of the signal to avoid edge effects
1663pub fn seasonal_strength_wavelet(data: &FdMatrix, argvals: &[f64], period: f64) -> f64 {
1664    let (n, m) = data.shape();
1665    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1666        return f64::NAN;
1667    }
1668
1669    // Compute mean curve
1670    let mean_curve = compute_mean_curve(data);
1671
1672    // Remove DC component
1673    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1674    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1675
1676    // Compute total variance
1677    let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;
1678
1679    if total_variance < 1e-15 {
1680        return 0.0;
1681    }
1682
1683    // Compute wavelet transform at the seasonal scale
1684    let omega0 = 6.0;
1685    let scale = period * omega0 / (2.0 * PI);
1686    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1687
1688    if wavelet_coeffs.is_empty() {
1689        return f64::NAN;
1690    }
1691
1692    // Compute wavelet power, skipping edges (10% on each side)
1693    let (interior_start, interior_end) = match interior_bounds(m) {
1694        Some(bounds) => bounds,
1695        None => return f64::NAN,
1696    };
1697
1698    let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
1699        .iter()
1700        .map(|c| c.norm_sqr())
1701        .sum::<f64>()
1702        / (interior_end - interior_start) as f64;
1703
1704    // Return ratio of wavelet power to total variance
1705    // Normalize so that a pure sine at the target period gives ~1.0
1706    (wavelet_power / total_variance).sqrt().min(1.0)
1707}
1708
1709/// Compute time-varying seasonal strength using sliding windows.
1710///
1711/// # Arguments
1712/// * `data` - Column-major matrix (n x m)
1713/// * `n` - Number of samples
1714/// * `m` - Number of evaluation points
1715/// * `argvals` - Evaluation points
1716/// * `period` - Seasonal period
1717/// * `window_size` - Window width (recommended: 2 * period)
1718/// * `method` - Method for computing strength (Variance or Spectral)
1719///
1720/// # Returns
1721/// Seasonal strength at each time point
1722pub fn seasonal_strength_windowed(
1723    data: &FdMatrix,
1724    argvals: &[f64],
1725    period: f64,
1726    window_size: f64,
1727    method: StrengthMethod,
1728) -> Vec<f64> {
1729    let (n, m) = data.shape();
1730    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
1731        return Vec::new();
1732    }
1733
1734    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1735    let half_window_points = ((window_size / 2.0) / dt).round() as usize;
1736
1737    // Compute mean curve
1738    let mean_curve = compute_mean_curve(data);
1739
1740    iter_maybe_parallel!(0..m)
1741        .map(|center| {
1742            let start = center.saturating_sub(half_window_points);
1743            let end = (center + half_window_points + 1).min(m);
1744            let window_m = end - start;
1745
1746            if window_m < 4 {
1747                return f64::NAN;
1748            }
1749
1750            let window_data: Vec<f64> = mean_curve[start..end].to_vec();
1751            let window_argvals: Vec<f64> = argvals[start..end].to_vec();
1752
1753            // Create single-sample FdMatrix for the strength functions
1754            let single_mat = FdMatrix::from_column_major(window_data, 1, window_m).unwrap();
1755
1756            match method {
1757                StrengthMethod::Variance => {
1758                    seasonal_strength_variance(&single_mat, &window_argvals, period, 3)
1759                }
1760                StrengthMethod::Spectral => {
1761                    seasonal_strength_spectral(&single_mat, &window_argvals, period)
1762                }
1763            }
1764        })
1765        .collect()
1766}
1767
1768// ============================================================================
1769// Seasonality Change Detection
1770// ============================================================================
1771
1772/// Detect changes in seasonality.
1773///
1774/// Monitors time-varying seasonal strength and detects threshold crossings
1775/// that indicate onset or cessation of seasonality.
1776///
1777/// # Arguments
1778/// * `data` - Column-major matrix (n x m)
1779/// * `n` - Number of samples
1780/// * `m` - Number of evaluation points
1781/// * `argvals` - Evaluation points
1782/// * `period` - Seasonal period
1783/// * `threshold` - SS threshold for seasonal/non-seasonal (e.g., 0.3)
1784/// * `window_size` - Window size for local strength estimation
1785/// * `min_duration` - Minimum duration to confirm a change
1786pub fn detect_seasonality_changes(
1787    data: &FdMatrix,
1788    argvals: &[f64],
1789    period: f64,
1790    threshold: f64,
1791    window_size: f64,
1792    min_duration: f64,
1793) -> ChangeDetectionResult {
1794    let (n, m) = data.shape();
1795    if n == 0 || m < 4 || argvals.len() != m {
1796        return ChangeDetectionResult {
1797            change_points: Vec::new(),
1798            strength_curve: Vec::new(),
1799        };
1800    }
1801
1802    // Compute time-varying seasonal strength
1803    let strength_curve =
1804        seasonal_strength_windowed(data, argvals, period, window_size, StrengthMethod::Variance);
1805
1806    if strength_curve.is_empty() {
1807        return ChangeDetectionResult {
1808            change_points: Vec::new(),
1809            strength_curve: Vec::new(),
1810        };
1811    }
1812
1813    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1814    let min_dur_points = (min_duration / dt).round() as usize;
1815
1816    let change_points =
1817        detect_threshold_crossings(&strength_curve, argvals, threshold, min_dur_points);
1818
1819    ChangeDetectionResult {
1820        change_points,
1821        strength_curve,
1822    }
1823}
1824
1825// ============================================================================
1826// Amplitude Modulation Detection
1827// ============================================================================
1828
1829/// Detect amplitude modulation in seasonal time series.
1830///
1831/// This function first checks if seasonality exists using the spectral method
1832/// (which is robust to amplitude modulation), then uses Hilbert transform to
1833/// extract the amplitude envelope and analyze modulation patterns.
1834///
1835/// # Arguments
1836/// * `data` - Column-major matrix (n x m)
1837/// * `n` - Number of samples
1838/// * `m` - Number of evaluation points
1839/// * `argvals` - Evaluation points
1840/// * `period` - Seasonal period in argvals units
1841/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
1842/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
1843///
1844/// # Returns
1845/// `AmplitudeModulationResult` containing detection results and diagnostics
1846///
1847/// # Example
1848/// ```ignore
1849/// let result = detect_amplitude_modulation(
1850///     &data, n, m, &argvals,
1851///     period,
1852///     0.15,          // CV > 0.15 indicates modulation
1853///     0.3,           // strength > 0.3 indicates seasonality
1854/// );
1855/// if result.has_modulation {
1856///     match result.modulation_type {
1857///         ModulationType::Emerging => println!("Seasonality is emerging"),
1858///         ModulationType::Fading => println!("Seasonality is fading"),
1859///         _ => {}
1860///     }
1861/// }
1862/// ```
1863pub fn detect_amplitude_modulation(
1864    data: &FdMatrix,
1865    argvals: &[f64],
1866    period: f64,
1867    modulation_threshold: f64,
1868    seasonality_threshold: f64,
1869) -> AmplitudeModulationResult {
1870    let (n, m) = data.shape();
1871    // Default result for invalid input
1872    let empty_result = AmplitudeModulationResult {
1873        is_seasonal: false,
1874        seasonal_strength: 0.0,
1875        has_modulation: false,
1876        modulation_type: ModulationType::NonSeasonal,
1877        modulation_score: 0.0,
1878        amplitude_trend: 0.0,
1879        strength_curve: Vec::new(),
1880        time_points: Vec::new(),
1881        min_strength: 0.0,
1882        max_strength: 0.0,
1883    };
1884
1885    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1886        return empty_result;
1887    }
1888
1889    // Step 1: Check if seasonality exists using spectral method (robust to AM)
1890    let overall_strength = seasonal_strength_spectral(data, argvals, period);
1891
1892    if overall_strength < seasonality_threshold {
1893        return AmplitudeModulationResult {
1894            is_seasonal: false,
1895            seasonal_strength: overall_strength,
1896            has_modulation: false,
1897            modulation_type: ModulationType::NonSeasonal,
1898            modulation_score: 0.0,
1899            amplitude_trend: 0.0,
1900            strength_curve: Vec::new(),
1901            time_points: argvals.to_vec(),
1902            min_strength: 0.0,
1903            max_strength: 0.0,
1904        };
1905    }
1906
1907    // Step 2: Compute mean curve
1908    let mean_curve = compute_mean_curve(data);
1909
1910    // Step 3: Use Hilbert transform to get amplitude envelope
1911    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1912    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1913    let analytic = hilbert_transform(&detrended);
1914    let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
1915
1916    if envelope.is_empty() {
1917        return AmplitudeModulationResult {
1918            is_seasonal: true,
1919            seasonal_strength: overall_strength,
1920            has_modulation: false,
1921            modulation_type: ModulationType::Stable,
1922            modulation_score: 0.0,
1923            amplitude_trend: 0.0,
1924            strength_curve: Vec::new(),
1925            time_points: argvals.to_vec(),
1926            min_strength: 0.0,
1927            max_strength: 0.0,
1928        };
1929    }
1930
1931    // Step 4: Smooth the envelope to reduce high-frequency noise
1932    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1933    let smooth_window = ((period / dt) as usize).max(3);
1934    let half_window = smooth_window / 2;
1935
1936    let smoothed_envelope: Vec<f64> = (0..m)
1937        .map(|i| {
1938            let start = i.saturating_sub(half_window);
1939            let end = (i + half_window + 1).min(m);
1940            let sum: f64 = envelope[start..end].iter().sum();
1941            sum / (end - start) as f64
1942        })
1943        .collect();
1944
1945    // Step 5: Analyze envelope statistics
1946    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
1947        return AmplitudeModulationResult {
1948            is_seasonal: true,
1949            seasonal_strength: overall_strength,
1950            has_modulation: false,
1951            modulation_type: ModulationType::Stable,
1952            modulation_score: 0.0,
1953            amplitude_trend: 0.0,
1954            strength_curve: envelope,
1955            time_points: argvals.to_vec(),
1956            min_strength: 0.0,
1957            max_strength: 0.0,
1958        };
1959    };
1960
1961    let stats = analyze_amplitude_envelope(
1962        &smoothed_envelope[interior_start..interior_end],
1963        &argvals[interior_start..interior_end],
1964        modulation_threshold,
1965    );
1966
1967    AmplitudeModulationResult {
1968        is_seasonal: true,
1969        seasonal_strength: overall_strength,
1970        has_modulation: stats.has_modulation,
1971        modulation_type: stats.modulation_type,
1972        modulation_score: stats.modulation_score,
1973        amplitude_trend: stats.amplitude_trend,
1974        strength_curve: envelope,
1975        time_points: argvals.to_vec(),
1976        min_strength: stats.min_amp,
1977        max_strength: stats.max_amp,
1978    }
1979}
1980
/// Detect amplitude modulation using Morlet wavelet transform.
///
/// Uses continuous wavelet transform at the seasonal period to extract
/// time-varying amplitude. This method is more robust to noise and can
/// better handle non-stationary signals compared to Hilbert transform.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m); the sample count and number of
///   evaluation points are taken from `data.shape()`
/// * `argvals` - Evaluation points
/// * `period` - Seasonal period in argvals units
/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
///
/// # Returns
/// `WaveletAmplitudeResult` containing detection results and wavelet amplitude curve
///
/// # Notes
/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
/// - The scale parameter is derived from the period: scale = period * ω₀ / (2π)
/// - This relates to how wavelets measure period: for Morlet, period ≈ scale * 2π / ω₀
pub fn detect_amplitude_modulation_wavelet(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> WaveletAmplitudeResult {
    let (n, m) = data.shape();
    // Default result returned for invalid input (no samples, too few points,
    // mismatched time axis, or non-positive period).
    let empty_result = WaveletAmplitudeResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        wavelet_amplitude: Vec::new(),
        time_points: Vec::new(),
        scale: 0.0,
    };

    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Step 1: Check if seasonality exists using spectral method
    let overall_strength = seasonal_strength_spectral(data, argvals, period);

    if overall_strength < seasonality_threshold {
        // Below the seasonality threshold: report non-seasonal and skip
        // the wavelet analysis entirely.
        return WaveletAmplitudeResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale: 0.0,
        };
    }

    // Step 2: Compute mean curve
    let mean_curve = compute_mean_curve(data);

    // Remove DC component
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    // Step 3: Compute wavelet transform at the seasonal period
    // For Morlet wavelet: period = scale * 2π / ω₀, so scale = period * ω₀ / (2π)
    let omega0 = 6.0; // Standard Morlet parameter
    let scale = period * omega0 / (2.0 * PI);

    // Use FFT-based CWT for efficiency
    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);

    if wavelet_coeffs.is_empty() {
        // CWT produced no coefficients: seasonality was confirmed above, so
        // report it as stable with no modulation information.
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale,
        };
    }

    // Step 4: Extract amplitude (magnitude of wavelet coefficients)
    let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();

    // Step 5: Analyze amplitude envelope statistics (skip edges)
    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
        // No usable interior span: return the amplitude curve without
        // modulation statistics.
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude,
            time_points: argvals.to_vec(),
            scale,
        };
    };

    let stats = analyze_amplitude_envelope(
        &wavelet_amplitude[interior_start..interior_end],
        &argvals[interior_start..interior_end],
        modulation_threshold,
    );

    WaveletAmplitudeResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation: stats.has_modulation,
        modulation_type: stats.modulation_type,
        modulation_score: stats.modulation_score,
        amplitude_trend: stats.amplitude_trend,
        wavelet_amplitude,
        time_points: argvals.to_vec(),
        scale,
    }
}
2109
2110// ============================================================================
2111// Instantaneous Period
2112// ============================================================================
2113
2114/// Estimate instantaneous period using Hilbert transform.
2115///
2116/// For series with drifting/changing period, this computes the period
2117/// at each time point using the analytic signal.
2118///
2119/// # Arguments
2120/// * `data` - Column-major matrix (n x m)
2121/// * `n` - Number of samples
2122/// * `m` - Number of evaluation points
2123/// * `argvals` - Evaluation points
2124pub fn instantaneous_period(data: &FdMatrix, argvals: &[f64]) -> InstantaneousPeriod {
2125    let (n, m) = data.shape();
2126    if n == 0 || m < 4 || argvals.len() != m {
2127        return InstantaneousPeriod {
2128            period: Vec::new(),
2129            frequency: Vec::new(),
2130            amplitude: Vec::new(),
2131        };
2132    }
2133
2134    // Compute mean curve
2135    let mean_curve = compute_mean_curve(data);
2136
2137    // Remove DC component (detrend by subtracting mean)
2138    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
2139    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
2140
2141    // Compute analytic signal via Hilbert transform
2142    let analytic = hilbert_transform(&detrended);
2143
2144    // Extract instantaneous amplitude and phase
2145    let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
2146
2147    let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();
2148
2149    // Unwrap phase
2150    let unwrapped_phase = unwrap_phase(&phase);
2151
2152    // Compute instantaneous frequency (derivative of phase)
2153    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
2154    let mut inst_freq = vec![0.0; m];
2155
2156    // Central differences for interior, forward/backward at boundaries
2157    if m > 1 {
2158        inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
2159    }
2160    for j in 1..(m - 1) {
2161        inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
2162    }
2163    if m > 1 {
2164        inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
2165    }
2166
2167    // Period = 1/frequency (handle near-zero frequencies)
2168    let period: Vec<f64> = inst_freq
2169        .iter()
2170        .map(|&f| {
2171            if f.abs() > 1e-10 {
2172                (1.0 / f).abs()
2173            } else {
2174                f64::INFINITY
2175            }
2176        })
2177        .collect();
2178
2179    InstantaneousPeriod {
2180        period,
2181        frequency: inst_freq,
2182        amplitude,
2183    }
2184}
2185
2186// ============================================================================
2187// Peak Timing Variability Analysis
2188// ============================================================================
2189
2190/// Analyze peak timing variability across cycles.
2191///
2192/// For short series (e.g., 3-5 years of yearly data), this function detects
2193/// one peak per cycle and analyzes how peak timing varies between cycles.
2194///
2195/// # Arguments
2196/// * `data` - Column-major matrix (n x m)
2197/// * `n` - Number of samples
2198/// * `m` - Number of evaluation points
2199/// * `argvals` - Evaluation points
2200/// * `period` - Known period (e.g., 365 for daily data with yearly seasonality)
2201/// * `smooth_nbasis` - Number of Fourier basis functions for smoothing.
2202///   If None, uses GCV for automatic selection.
2203///
2204/// # Returns
2205/// Peak timing result with variability metrics
2206pub fn analyze_peak_timing(
2207    data: &FdMatrix,
2208    argvals: &[f64],
2209    period: f64,
2210    smooth_nbasis: Option<usize>,
2211) -> PeakTimingResult {
2212    let (n, m) = data.shape();
2213    if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
2214        return PeakTimingResult {
2215            peak_times: Vec::new(),
2216            peak_values: Vec::new(),
2217            normalized_timing: Vec::new(),
2218            mean_timing: f64::NAN,
2219            std_timing: f64::NAN,
2220            range_timing: f64::NAN,
2221            variability_score: f64::NAN,
2222            timing_trend: f64::NAN,
2223            cycle_indices: Vec::new(),
2224        };
2225    }
2226
2227    // Detect peaks with minimum distance constraint of 0.7 * period
2228    // This ensures we get at most one peak per cycle
2229    let min_distance = period * 0.7;
2230    let peaks = detect_peaks(
2231        data,
2232        argvals,
2233        Some(min_distance),
2234        None, // No prominence filter
2235        true, // Smooth first with Fourier basis
2236        smooth_nbasis,
2237    );
2238
2239    // Use the first sample's peaks (for mean curve analysis)
2240    // If multiple samples, we take the mean curve which is effectively in sample 0
2241    let sample_peaks = if peaks.peaks.is_empty() {
2242        Vec::new()
2243    } else {
2244        peaks.peaks[0].clone()
2245    };
2246
2247    if sample_peaks.is_empty() {
2248        return PeakTimingResult {
2249            peak_times: Vec::new(),
2250            peak_values: Vec::new(),
2251            normalized_timing: Vec::new(),
2252            mean_timing: f64::NAN,
2253            std_timing: f64::NAN,
2254            range_timing: f64::NAN,
2255            variability_score: f64::NAN,
2256            timing_trend: f64::NAN,
2257            cycle_indices: Vec::new(),
2258        };
2259    }
2260
2261    let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
2262    let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();
2263
2264    // Compute normalized timing (position within cycle, 0-1 scale)
2265    let t_start = argvals[0];
2266    let normalized_timing: Vec<f64> = peak_times
2267        .iter()
2268        .map(|&t| {
2269            let cycle_pos = (t - t_start) % period;
2270            cycle_pos / period
2271        })
2272        .collect();
2273
2274    // Compute cycle indices (1-indexed)
2275    let cycle_indices: Vec<usize> = peak_times
2276        .iter()
2277        .map(|&t| ((t - t_start) / period).floor() as usize + 1)
2278        .collect();
2279
2280    // Compute statistics
2281    let n_peaks = normalized_timing.len() as f64;
2282    let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;
2283
2284    let variance: f64 = normalized_timing
2285        .iter()
2286        .map(|&x| (x - mean_timing).powi(2))
2287        .sum::<f64>()
2288        / n_peaks;
2289    let std_timing = variance.sqrt();
2290
2291    let min_timing = normalized_timing
2292        .iter()
2293        .cloned()
2294        .fold(f64::INFINITY, f64::min);
2295    let max_timing = normalized_timing
2296        .iter()
2297        .cloned()
2298        .fold(f64::NEG_INFINITY, f64::max);
2299    let range_timing = max_timing - min_timing;
2300
2301    // Variability score: normalized std deviation
2302    // Max possible std for uniform in [0,1] is ~0.289, so we scale by that
2303    // But since peaks cluster, we use 0.1 as "high" variability threshold
2304    let variability_score = (std_timing / 0.1).min(1.0);
2305
2306    // Timing trend: linear regression of normalized timing on cycle index
2307    let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
2308    let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);
2309
2310    PeakTimingResult {
2311        peak_times,
2312        peak_values,
2313        normalized_timing,
2314        mean_timing,
2315        std_timing,
2316        range_timing,
2317        variability_score,
2318        timing_trend,
2319        cycle_indices,
2320    }
2321}
2322
2323// ============================================================================
2324// Seasonality Classification
2325// ============================================================================
2326
2327/// Classify the type of seasonality in functional data.
2328///
2329/// This is particularly useful for short series (3-5 years) where you need
2330/// to identify:
2331/// - Whether seasonality is present
2332/// - Whether peak timing is stable or variable
2333/// - Which cycles have weak or missing seasonality
2334///
2335/// # Arguments
2336/// * `data` - Column-major matrix (n x m)
2337/// * `n` - Number of samples
2338/// * `m` - Number of evaluation points
2339/// * `argvals` - Evaluation points
2340/// * `period` - Known seasonal period
2341/// * `strength_threshold` - Threshold for seasonal/non-seasonal (default: 0.3)
2342/// * `timing_threshold` - Max std of normalized timing for "stable" (default: 0.05)
2343///
2344/// # Returns
2345/// Seasonality classification with type and diagnostics
2346pub fn classify_seasonality(
2347    data: &FdMatrix,
2348    argvals: &[f64],
2349    period: f64,
2350    strength_threshold: Option<f64>,
2351    timing_threshold: Option<f64>,
2352) -> SeasonalityClassification {
2353    let strength_thresh = strength_threshold.unwrap_or(0.3);
2354    let timing_thresh = timing_threshold.unwrap_or(0.05);
2355
2356    let (n, m) = data.shape();
2357    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
2358        return SeasonalityClassification {
2359            is_seasonal: false,
2360            has_stable_timing: false,
2361            timing_variability: f64::NAN,
2362            seasonal_strength: f64::NAN,
2363            cycle_strengths: Vec::new(),
2364            weak_seasons: Vec::new(),
2365            classification: SeasonalType::NonSeasonal,
2366            peak_timing: None,
2367        };
2368    }
2369
2370    // Compute overall seasonal strength
2371    let overall_strength = seasonal_strength_variance(data, argvals, period, 3);
2372
2373    let (cycle_strengths, weak_seasons) =
2374        compute_cycle_strengths(data, argvals, period, strength_thresh);
2375    let n_cycles = cycle_strengths.len();
2376
2377    // Analyze peak timing
2378    let peak_timing = analyze_peak_timing(data, argvals, period, None);
2379
2380    // Determine classification
2381    let is_seasonal = overall_strength >= strength_thresh;
2382    let has_stable_timing = peak_timing.std_timing <= timing_thresh;
2383    let timing_variability = peak_timing.variability_score;
2384
2385    // Classify based on patterns
2386    let n_weak = weak_seasons.len();
2387    let classification = if !is_seasonal {
2388        SeasonalType::NonSeasonal
2389    } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
2390        // More than 30% of cycles are weak
2391        SeasonalType::IntermittentSeasonal
2392    } else if !has_stable_timing {
2393        SeasonalType::VariableTiming
2394    } else {
2395        SeasonalType::StableSeasonal
2396    };
2397
2398    SeasonalityClassification {
2399        is_seasonal,
2400        has_stable_timing,
2401        timing_variability,
2402        seasonal_strength: overall_strength,
2403        cycle_strengths,
2404        weak_seasons,
2405        classification,
2406        peak_timing: Some(peak_timing),
2407    }
2408}
2409
2410/// Detect seasonality changes with automatic threshold selection.
2411///
2412/// Uses Otsu's method or percentile-based threshold instead of a fixed value.
2413///
2414/// # Arguments
2415/// * `data` - Column-major matrix (n x m)
2416/// * `n` - Number of samples
2417/// * `m` - Number of evaluation points
2418/// * `argvals` - Evaluation points
2419/// * `period` - Seasonal period
2420/// * `threshold_method` - Method for threshold selection
2421/// * `window_size` - Window size for local strength estimation
2422/// * `min_duration` - Minimum duration to confirm a change
2423pub fn detect_seasonality_changes_auto(
2424    data: &FdMatrix,
2425    argvals: &[f64],
2426    period: f64,
2427    threshold_method: ThresholdMethod,
2428    window_size: f64,
2429    min_duration: f64,
2430) -> ChangeDetectionResult {
2431    let (n, m) = data.shape();
2432    if n == 0 || m < 4 || argvals.len() != m {
2433        return ChangeDetectionResult {
2434            change_points: Vec::new(),
2435            strength_curve: Vec::new(),
2436        };
2437    }
2438
2439    // Compute time-varying seasonal strength
2440    let strength_curve =
2441        seasonal_strength_windowed(data, argvals, period, window_size, StrengthMethod::Variance);
2442
2443    if strength_curve.is_empty() {
2444        return ChangeDetectionResult {
2445            change_points: Vec::new(),
2446            strength_curve: Vec::new(),
2447        };
2448    }
2449
2450    // Determine threshold
2451    let threshold = match threshold_method {
2452        ThresholdMethod::Fixed(t) => t,
2453        ThresholdMethod::Percentile(p) => {
2454            let mut sorted: Vec<f64> = strength_curve
2455                .iter()
2456                .copied()
2457                .filter(|x| x.is_finite())
2458                .collect();
2459            sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2460            if sorted.is_empty() {
2461                0.5
2462            } else {
2463                let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2464                sorted[idx.min(sorted.len() - 1)]
2465            }
2466        }
2467        ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2468    };
2469
2470    // Now use the regular detection with computed threshold
2471    detect_seasonality_changes(data, argvals, period, threshold, window_size, min_duration)
2472}
2473
/// Result of SAZED ensemble period detection.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Primary detected period (consensus from ensemble); NaN when input is
    /// invalid or too few components were confident enough to vote
    pub period: f64,
    /// Confidence score (0-1): fraction of the 5 components that agreed on
    /// the consensus period
    pub confidence: f64,
    /// Periods detected by each component (may be NaN if not detected)
    pub component_periods: SazedComponents,
    /// Number of components that agreed on the final period
    pub agreeing_components: usize,
}
2486
/// Individual period estimates from each SAZED component.
///
/// Any field may be NaN when the corresponding component failed to
/// produce an estimate.
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Period from spectral (FFT) detection
    pub spectral: f64,
    /// Period from the first significant ACF peak
    pub acf_peak: f64,
    /// Period from weighted ACF average
    pub acf_average: f64,
    /// Period from ACF zero-crossing analysis
    pub zero_crossing: f64,
    /// Period from spectral detection on the first-differenced signal
    pub spectral_diff: f64,
}
2501
/// SAZED: Spectral-ACF Zero-crossing Ensemble Detection
///
/// A parameter-free ensemble method for robust period detection.
/// Combines 5 detection components:
/// 1. Spectral (FFT) - peaks in periodogram
/// 2. ACF peak - first significant peak in autocorrelation
/// 3. ACF average - weighted mean of ACF peaks
/// 4. Zero-crossing - period from ACF zero crossings
/// 5. Spectral differencing - FFT on first-differenced signal
///
/// Each component provides both a period estimate and a confidence score.
/// Only components with sufficient confidence participate in voting.
/// The final period is chosen by majority voting with tolerance.
///
/// # Arguments
/// * `data` - Input signal (1D time series or mean curve from fdata)
/// * `argvals` - Time points corresponding to data
/// * `tolerance` - Relative tolerance for considering periods equal (default: 0.05 = 5%)
///
/// # Returns
/// * `SazedResult` containing the consensus period and component details
pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
    let n = data.len();
    let tol = tolerance.unwrap_or(0.05); // Tighter default tolerance

    // Need at least 8 samples and a matching time axis; otherwise no period.
    if n < 8 || argvals.len() != n {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: SazedComponents {
                spectral: f64::NAN,
                acf_peak: f64::NAN,
                acf_average: f64::NAN,
                zero_crossing: f64::NAN,
                spectral_diff: f64::NAN,
            },
            agreeing_components: 0,
        };
    }

    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);
    let signal_range = argvals[n - 1] - argvals[0];

    // Minimum detectable period: signal_range / (n/3) ≈ 3*dt, i.e. at least
    // ~3 samples per period are required.
    let min_period = signal_range / (n as f64 / 3.0);
    // Maximum detectable period: signal_range / 2, i.e. at least 2 complete
    // cycles must fit within the observed range.
    let max_period = signal_range / 2.0;

    // Component 1: Spectral (FFT) detection with confidence
    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);

    // Component 2: ACF peak detection with confidence
    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);

    // Component 3: ACF weighted average (uses ACF peak confidence)
    let acf_average_period = sazed_acf_average(data, dt, max_lag);

    // Component 4: Zero-crossing analysis with confidence
    let (zero_crossing_period, zero_crossing_conf) =
        sazed_zero_crossing_with_confidence(data, dt, max_lag);

    // Component 5: Spectral on differenced signal with confidence
    let (spectral_diff_period, spectral_diff_conf) =
        sazed_spectral_diff_with_confidence(data, argvals);

    let components = SazedComponents {
        spectral: spectral_period,
        acf_peak: acf_peak_period,
        acf_average: acf_average_period,
        zero_crossing: zero_crossing_period,
        spectral_diff: spectral_diff_period,
    };

    // Confidence thresholds for each component (tuned to minimize FPR on noise)
    // For Gaussian noise: spectral peaks rarely exceed 6x median, ACF ~1/sqrt(n)
    let spectral_thresh = 8.0; // Power ratio must be > 8x median (noise rarely exceeds 6x)
    let acf_thresh = 0.3; // ACF correlation must be > 0.3 (noise ~0.1 for n=100)
    let zero_crossing_thresh = 0.9; // Zero-crossing consistency > 90%
    let spectral_diff_thresh = 6.0; // Diff spectral power ratio > 6x

    // Minimum number of confident components required to report a period
    let min_confident_components = 2;

    // Collect valid periods (only from components with sufficient confidence)
    let confident_periods: Vec<f64> = [
        validate_sazed_component(
            spectral_period,
            spectral_conf,
            min_period,
            max_period,
            spectral_thresh,
        ),
        validate_sazed_component(
            acf_peak_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            acf_average_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            zero_crossing_period,
            zero_crossing_conf,
            min_period,
            max_period,
            zero_crossing_thresh,
        ),
        validate_sazed_component(
            spectral_diff_period,
            spectral_diff_conf,
            min_period,
            max_period,
            spectral_diff_thresh,
        ),
    ]
    .into_iter()
    .flatten()
    .collect();

    // Require minimum number of confident components before reporting a period
    if confident_periods.len() < min_confident_components {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: components,
            agreeing_components: confident_periods.len(),
        };
    }

    // Ensemble voting: find the mode with tolerance
    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
    // Confidence is the fraction of all 5 components that agreed.
    let confidence = agreeing_count as f64 / 5.0;

    SazedResult {
        period: consensus_period,
        confidence,
        component_periods: components,
        agreeing_components: agreeing_count,
    }
}
2649
2650/// SAZED for functional data (matrix format)
2651///
2652/// Computes mean curve first, then applies SAZED.
2653///
2654/// # Arguments
2655/// * `data` - Column-major matrix (n x m)
2656/// * `n` - Number of samples
2657/// * `m` - Number of evaluation points
2658/// * `argvals` - Evaluation points
2659/// * `tolerance` - Relative tolerance for period matching
2660pub fn sazed_fdata(data: &FdMatrix, argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
2661    let (n, m) = data.shape();
2662    if n == 0 || m < 8 || argvals.len() != m {
2663        return SazedResult {
2664            period: f64::NAN,
2665            confidence: 0.0,
2666            component_periods: SazedComponents {
2667                spectral: f64::NAN,
2668                acf_peak: f64::NAN,
2669                acf_average: f64::NAN,
2670                zero_crossing: f64::NAN,
2671                spectral_diff: f64::NAN,
2672            },
2673            agreeing_components: 0,
2674        };
2675    }
2676
2677    let mean_curve = compute_mean_curve(data);
2678    sazed(&mean_curve, argvals, tolerance)
2679}
2680
2681/// Spectral component with confidence: returns (period, power_ratio)
2682fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2683    let (frequencies, power) = periodogram(data, argvals);
2684
2685    if frequencies.len() < 3 {
2686        return (f64::NAN, 0.0);
2687    }
2688
2689    // Find peaks in power spectrum (skip DC)
2690    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2691
2692    if power_no_dc.is_empty() {
2693        return (f64::NAN, 0.0);
2694    }
2695
2696    // Calculate noise floor as median
2697    let mut sorted_power = power_no_dc.clone();
2698    sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2699    let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2700
2701    // Find global maximum
2702    let mut max_idx = 0;
2703    let mut max_val = 0.0;
2704    for (i, &p) in power_no_dc.iter().enumerate() {
2705        if p > max_val {
2706            max_val = p;
2707            max_idx = i;
2708        }
2709    }
2710
2711    let power_ratio = max_val / noise_floor;
2712    let freq = frequencies[max_idx + 1];
2713
2714    if freq > 1e-15 {
2715        (1.0 / freq, power_ratio)
2716    } else {
2717        (f64::NAN, 0.0)
2718    }
2719}
2720
2721/// ACF peak component with confidence: returns (period, acf_value_at_peak)
2722fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2723    let acf = autocorrelation(data, max_lag);
2724
2725    match find_first_acf_peak(&acf) {
2726        Some((peak_lag, acf_value)) => (peak_lag as f64 * dt, acf_value),
2727        None => (f64::NAN, 0.0),
2728    }
2729}
2730
2731/// ACF average component: weighted mean of ACF peak locations
2732fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
2733    let acf = autocorrelation(data, max_lag);
2734
2735    if acf.len() < 4 {
2736        return f64::NAN;
2737    }
2738
2739    // Find all peaks in ACF
2740    let peaks = find_peaks_1d(&acf[1..], 1);
2741
2742    if peaks.is_empty() {
2743        return f64::NAN;
2744    }
2745
2746    // Weight peaks by their ACF value
2747    let mut weighted_sum = 0.0;
2748    let mut weight_sum = 0.0;
2749
2750    for (i, &peak_idx) in peaks.iter().enumerate() {
2751        let lag = peak_idx + 1;
2752        let weight = acf[lag].max(0.0);
2753
2754        if i == 0 {
2755            // First peak is the fundamental period
2756            weighted_sum += lag as f64 * weight;
2757            weight_sum += weight;
2758        } else {
2759            // Later peaks: estimate fundamental by dividing by harmonic number
2760            let expected_fundamental = peaks[0] + 1;
2761            let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
2762            if harmonic > 0 {
2763                let fundamental_est = lag as f64 / harmonic as f64;
2764                weighted_sum += fundamental_est * weight;
2765                weight_sum += weight;
2766            }
2767        }
2768    }
2769
2770    if weight_sum > 1e-15 {
2771        weighted_sum / weight_sum * dt
2772    } else {
2773        f64::NAN
2774    }
2775}
2776
2777/// Zero-crossing component with confidence: returns (period, consistency)
2778/// Consistency is how regular the zero crossings are (std/mean of half-periods)
2779fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2780    let acf = autocorrelation(data, max_lag);
2781
2782    if acf.len() < 4 {
2783        return (f64::NAN, 0.0);
2784    }
2785
2786    // Find zero crossings (sign changes)
2787    let mut crossings = Vec::new();
2788    for i in 1..acf.len() {
2789        if acf[i - 1] * acf[i] < 0.0 {
2790            // Linear interpolation for more precise crossing
2791            let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
2792            crossings.push((i - 1) as f64 + frac);
2793        }
2794    }
2795
2796    if crossings.len() < 2 {
2797        return (f64::NAN, 0.0);
2798    }
2799
2800    // Period is twice the distance between consecutive zero crossings
2801    // (ACF goes through two zero crossings per period)
2802    let mut half_periods = Vec::new();
2803    for i in 1..crossings.len() {
2804        half_periods.push(crossings[i] - crossings[i - 1]);
2805    }
2806
2807    if half_periods.is_empty() {
2808        return (f64::NAN, 0.0);
2809    }
2810
2811    // Calculate consistency: 1 - (std/mean) of half-periods
2812    // High consistency means regular zero crossings
2813    let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
2814    let variance: f64 =
2815        half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
2816    let std = variance.sqrt();
2817    let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);
2818
2819    // Median half-period
2820    half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2821    let median_half = half_periods[half_periods.len() / 2];
2822
2823    (2.0 * median_half * dt, consistency)
2824}
2825
2826/// Spectral differencing with confidence: returns (period, power_ratio)
2827fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2828    if data.len() < 4 {
2829        return (f64::NAN, 0.0);
2830    }
2831
2832    // First difference to remove trend
2833    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2834    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2835
2836    sazed_spectral_with_confidence(&diff, &diff_argvals)
2837}
2838
/// Find peaks in power spectrum above noise floor
///
/// Returns the indices of interior local maxima whose power exceeds twice the
/// median power, ordered from strongest to weakest.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    if power.len() < 3 {
        return Vec::new();
    }

    // Noise floor estimated as the median; peaks must be at least 2x that.
    let mut ranked = power.to_vec();
    ranked.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let threshold = 2.0 * ranked[ranked.len() / 2];

    // Interior local maxima above the threshold.
    let mut maxima: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();

    // Strongest first.
    maxima.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    maxima.into_iter().map(|(idx, _)| idx).collect()
}
2864
2865/// Find consensus period from multiple estimates using tolerance-based voting
2866fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
2867    if periods.is_empty() {
2868        return (f64::NAN, 0);
2869    }
2870    if periods.len() == 1 {
2871        return (periods[0], 1);
2872    }
2873
2874    let mut best_period = periods[0];
2875    let mut best_count = 0;
2876    let mut best_sum = 0.0;
2877
2878    for &p1 in periods {
2879        let (count, sum) = count_agreeing_periods(periods, p1, tolerance);
2880
2881        if count > best_count
2882            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64)
2883        {
2884            best_count = count;
2885            best_period = sum / count as f64;
2886            best_sum = sum;
2887        }
2888    }
2889
2890    (best_period, best_count)
2891}
2892
/// Result of Autoperiod detection.
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Detected period (NaN when detection fails or input is degenerate)
    pub period: f64,
    /// Combined confidence (normalized FFT power * ACF validation score)
    pub confidence: f64,
    /// FFT power at the detected period
    pub fft_power: f64,
    /// ACF validation score (0-1)
    pub acf_validation: f64,
    /// All candidate periods considered
    pub candidates: Vec<AutoperiodCandidate>,
}
2907
/// A candidate period from Autoperiod detection.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Candidate period (refined by gradient ascent on the ACF)
    pub period: f64,
    /// Raw FFT power at the originating spectral peak
    pub fft_power: f64,
    /// ACF validation score for the refined period
    pub acf_score: f64,
    /// Combined score (FFT power normalized by total spectrum power, times ACF score)
    pub combined_score: f64,
}
2920
2921fn empty_autoperiod_result() -> AutoperiodResult {
2922    AutoperiodResult {
2923        period: f64::NAN,
2924        confidence: 0.0,
2925        fft_power: 0.0,
2926        acf_validation: 0.0,
2927        candidates: Vec::new(),
2928    }
2929}
2930
2931/// Build an autoperiod candidate from a spectral peak, refining with gradient ascent on ACF.
2932fn build_autoperiod_candidate(
2933    peak_idx: usize,
2934    frequencies: &[f64],
2935    power_no_dc: &[f64],
2936    acf: &[f64],
2937    dt: f64,
2938    steps: usize,
2939    total_power: f64,
2940) -> Option<AutoperiodCandidate> {
2941    let freq = frequencies[peak_idx + 1];
2942    if freq < 1e-15 {
2943        return None;
2944    }
2945    let fft_power = power_no_dc[peak_idx];
2946    let normalized_power = fft_power / total_power.max(1e-15);
2947    let refined_period = refine_period_gradient(acf, 1.0 / freq, dt, steps);
2948    let refined_acf_score = validate_period_acf(acf, refined_period, dt);
2949    Some(AutoperiodCandidate {
2950        period: refined_period,
2951        fft_power,
2952        acf_score: refined_acf_score,
2953        combined_score: normalized_power * refined_acf_score,
2954    })
2955}
2956
2957/// Autoperiod: Hybrid FFT + ACF Period Detection
2958///
2959/// Implements the Autoperiod algorithm (Vlachos et al. 2005) which:
2960/// 1. Computes the periodogram via FFT to find candidate periods
2961/// 2. Validates each candidate using the autocorrelation function
2962/// 3. Applies gradient ascent to refine the period estimate
2963/// 4. Returns the period with the highest combined confidence
2964///
2965/// This method is more robust than pure FFT because ACF validation
2966/// filters out spurious spectral peaks that don't correspond to
2967/// true periodicity.
2968///
2969/// # Arguments
2970/// * `data` - Input signal (1D time series)
2971/// * `argvals` - Time points corresponding to data
2972/// * `n_candidates` - Maximum number of FFT peaks to consider (default: 5)
2973/// * `gradient_steps` - Number of gradient ascent refinement steps (default: 10)
2974///
2975/// # Returns
2976/// * `AutoperiodResult` containing the best period and validation details
2977pub fn autoperiod(
2978    data: &[f64],
2979    argvals: &[f64],
2980    n_candidates: Option<usize>,
2981    gradient_steps: Option<usize>,
2982) -> AutoperiodResult {
2983    let n = data.len();
2984    let max_candidates = n_candidates.unwrap_or(5);
2985    let steps = gradient_steps.unwrap_or(10);
2986
2987    if n < 8 || argvals.len() != n {
2988        return empty_autoperiod_result();
2989    }
2990
2991    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
2992    let max_lag = (n / 2).max(4);
2993
2994    // Step 1: Compute periodogram and find candidate periods
2995    let (frequencies, power) = periodogram(data, argvals);
2996
2997    if frequencies.len() < 3 {
2998        return empty_autoperiod_result();
2999    }
3000
3001    // Find top spectral peaks
3002    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3003    let peak_indices = find_spectral_peaks(&power_no_dc);
3004
3005    if peak_indices.is_empty() {
3006        return empty_autoperiod_result();
3007    }
3008
3009    // Step 2: Compute ACF for validation
3010    let acf = autocorrelation(data, max_lag);
3011
3012    // Step 3: Validate each candidate and refine with gradient ascent
3013    let total_power: f64 = power_no_dc.iter().sum();
3014    let candidates: Vec<AutoperiodCandidate> = peak_indices
3015        .iter()
3016        .take(max_candidates)
3017        .filter_map(|&peak_idx| {
3018            build_autoperiod_candidate(
3019                peak_idx,
3020                &frequencies,
3021                &power_no_dc,
3022                &acf,
3023                dt,
3024                steps,
3025                total_power,
3026            )
3027        })
3028        .collect();
3029
3030    if candidates.is_empty() {
3031        return empty_autoperiod_result();
3032    }
3033
3034    // Select best candidate based on combined score
3035    let best = candidates
3036        .iter()
3037        .max_by(|a, b| {
3038            a.combined_score
3039                .partial_cmp(&b.combined_score)
3040                .unwrap_or(std::cmp::Ordering::Equal)
3041        })
3042        .unwrap();
3043
3044    AutoperiodResult {
3045        period: best.period,
3046        confidence: best.combined_score,
3047        fft_power: best.fft_power,
3048        acf_validation: best.acf_score,
3049        candidates,
3050    }
3051}
3052
3053/// Autoperiod for functional data (matrix format)
3054pub fn autoperiod_fdata(
3055    data: &FdMatrix,
3056    argvals: &[f64],
3057    n_candidates: Option<usize>,
3058    gradient_steps: Option<usize>,
3059) -> AutoperiodResult {
3060    let (n, m) = data.shape();
3061    if n == 0 || m < 8 || argvals.len() != m {
3062        return AutoperiodResult {
3063            period: f64::NAN,
3064            confidence: 0.0,
3065            fft_power: 0.0,
3066            acf_validation: 0.0,
3067            candidates: Vec::new(),
3068        };
3069    }
3070
3071    let mean_curve = compute_mean_curve(data);
3072    autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3073}
3074
/// Validate a candidate period using ACF
///
/// The base score is the (non-negative) ACF value at the period's lag. It is
/// halved when the half-period lag also shows strong correlation (suggesting
/// the true period is shorter) and boosted by 20% when the double-period lag
/// still correlates (reinforcing the candidate). The result is capped at 1.
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }

    let acf_at_lag = acf[lag];
    let mut score = acf_at_lag.max(0.0);

    // Strong correlation at half the period suggests the half-period is real.
    let half_lag = lag / 2;
    if half_lag > 0 {
        if let Some(&half_acf) = acf.get(half_lag) {
            if half_acf > acf_at_lag * 0.7 {
                score *= 0.5;
            }
        }
    }

    // Correlation persisting at twice the period reinforces the candidate.
    if let Some(&double_acf) = acf.get(lag * 2) {
        if double_acf > 0.3 {
            score *= 1.2;
        }
    }

    score.min(1.0)
}
3113
3114/// Refine period estimate using gradient ascent on ACF
3115fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3116    let mut period = initial_period;
3117    let step_size = dt * 0.5; // Search step size
3118
3119    for _ in 0..steps {
3120        let current_score = validate_period_acf(acf, period, dt);
3121        let left_score = validate_period_acf(acf, period - step_size, dt);
3122        let right_score = validate_period_acf(acf, period + step_size, dt);
3123
3124        if left_score > current_score && left_score > right_score {
3125            period -= step_size;
3126        } else if right_score > current_score {
3127            period += step_size;
3128        }
3129        // If current is best, we've converged
3130    }
3131
3132    period.max(dt) // Ensure period is at least one time step
3133}
3134
/// Result of CFDAutoperiod detection.
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Detected period (primary, i.e. highest combined score; NaN on failure)
    pub period: f64,
    /// Confidence score for the primary period (ACF score * cluster power)
    pub confidence: f64,
    /// ACF validation score for the primary period
    pub acf_validation: f64,
    /// All detected periods (cluster centers), ranked by combined score
    pub periods: Vec<f64>,
    /// Confidence for each detected period, parallel to `periods`
    pub confidences: Vec<f64>,
}
3149
/// Convert spectral peak indices to candidate (period, normalized_power) pairs.
///
/// Peaks at numerically zero frequency are dropped; power is normalized by
/// the total DC-free spectral power.
fn generate_cfd_candidates(
    frequencies: &[f64],
    power_no_dc: &[f64],
    peak_indices: &[usize],
) -> Vec<(f64, f64)> {
    let denom = power_no_dc.iter().sum::<f64>().max(1e-15);

    let mut candidates = Vec::with_capacity(peak_indices.len());
    for &idx in peak_indices {
        // +1 skips the DC bin that `power_no_dc` omits.
        let freq = frequencies[idx + 1];
        if freq > 1e-15 {
            candidates.push((1.0 / freq, power_no_dc[idx] / denom));
        }
    }
    candidates
}
3171
3172/// Validate clustered period candidates using ACF, returning (period, acf_score, power) triples.
3173fn validate_cfd_candidates(clusters: &[(f64, f64)], acf: &[f64], dt: f64) -> Vec<(f64, f64, f64)> {
3174    clusters
3175        .iter()
3176        .filter_map(|&(center, power_sum)| {
3177            let acf_score = validate_period_acf(acf, center, dt);
3178            if acf_score > 0.1 {
3179                Some((center, acf_score, power_sum))
3180            } else {
3181                None
3182            }
3183        })
3184        .collect()
3185}
3186
3187/// Validate cluster candidates with ACF, falling back to best cluster if none pass.
3188fn validate_or_fallback_cfd(
3189    validated: Vec<(f64, f64, f64)>,
3190    candidates: &[(f64, f64)],
3191    tol: f64,
3192    min_size: usize,
3193) -> Vec<(f64, f64, f64)> {
3194    if !validated.is_empty() {
3195        return validated;
3196    }
3197    // Fallback: pick highest-power cluster without ACF validation
3198    cluster_periods(candidates, tol, min_size)
3199        .into_iter()
3200        .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3201        .map(|(center, power_sum)| vec![(center, 0.0, power_sum)])
3202        .unwrap_or_default()
3203}
3204
/// Rank validated results by combined score (acf * power).
/// Returns (periods, confidences, top_acf_validation).
///
/// Precondition: `validated` must be non-empty (callers guarantee this);
/// an empty slice panics on the top-element access.
fn rank_cfd_results(validated: &[(f64, f64, f64)]) -> (Vec<f64>, Vec<f64>, f64) {
    let mut ranked = validated.to_vec();
    // Descending by acf_score * power_sum.
    ranked.sort_by(|x, y| {
        let (sx, sy) = (x.1 * x.2, y.1 * y.2);
        sy.partial_cmp(&sx).unwrap_or(std::cmp::Ordering::Equal)
    });

    let periods: Vec<f64> = ranked.iter().map(|r| r.0).collect();
    let confidences: Vec<f64> = ranked.iter().map(|r| r.1 * r.2).collect();
    let top_acf = ranked[0].1;
    (periods, confidences, top_acf)
}
3219
3220fn empty_cfd_result() -> CfdAutoperiodResult {
3221    CfdAutoperiodResult {
3222        period: f64::NAN,
3223        confidence: 0.0,
3224        acf_validation: 0.0,
3225        periods: Vec::new(),
3226        confidences: Vec::new(),
3227    }
3228}
3229
3230/// Extract spectral candidates from differenced data: difference, periodogram, peak-find, generate.
3231fn extract_cfd_spectral_candidates(data: &[f64], argvals: &[f64]) -> Option<Vec<(f64, f64)>> {
3232    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3233    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3234    let (frequencies, power) = periodogram(&diff, &diff_argvals);
3235    if frequencies.len() < 3 {
3236        return None;
3237    }
3238    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3239    let peak_indices = find_spectral_peaks(&power_no_dc);
3240    if peak_indices.is_empty() {
3241        return None;
3242    }
3243    let candidates = generate_cfd_candidates(&frequencies, &power_no_dc, &peak_indices);
3244    if candidates.is_empty() {
3245        None
3246    } else {
3247        Some(candidates)
3248    }
3249}
3250
3251/// CFDAutoperiod: Clustered Filtered Detrended Autoperiod
3252///
3253/// Implements the CFDAutoperiod algorithm (Puech et al. 2020) which:
3254/// 1. Applies first-order differencing to remove trends
3255/// 2. Computes FFT on the detrended signal
3256/// 3. Identifies candidate periods from periodogram peaks
3257/// 4. Clusters nearby candidates using density-based clustering
3258/// 5. Validates cluster centers using ACF on the original signal
3259///
3260/// This method is particularly effective for signals with strong trends
3261/// and handles multiple periodicities by detecting clusters of candidate periods.
3262///
3263/// # Arguments
3264/// * `data` - Input signal (1D time series)
3265/// * `argvals` - Time points corresponding to data
3266/// * `cluster_tolerance` - Relative tolerance for clustering periods (default: 0.1 = 10%)
3267/// * `min_cluster_size` - Minimum number of candidates to form a cluster (default: 1)
3268///
3269/// # Returns
3270/// * `CfdAutoperiodResult` containing detected periods and validation scores
3271pub fn cfd_autoperiod(
3272    data: &[f64],
3273    argvals: &[f64],
3274    cluster_tolerance: Option<f64>,
3275    min_cluster_size: Option<usize>,
3276) -> CfdAutoperiodResult {
3277    let n = data.len();
3278    let tol = cluster_tolerance.unwrap_or(0.1);
3279    let min_size = min_cluster_size.unwrap_or(1);
3280
3281    if n < 8 || argvals.len() != n {
3282        return empty_cfd_result();
3283    }
3284
3285    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3286    let max_lag = (n / 2).max(4);
3287
3288    let Some(candidates) = extract_cfd_spectral_candidates(data, argvals) else {
3289        return empty_cfd_result();
3290    };
3291
3292    let clusters = cluster_periods(&candidates, tol, min_size);
3293    if clusters.is_empty() {
3294        return empty_cfd_result();
3295    }
3296
3297    let acf = autocorrelation(data, max_lag);
3298    let validated = validate_cfd_candidates(&clusters, &acf, dt);
3299    let validated = validate_or_fallback_cfd(validated, &candidates, tol, min_size);
3300    let (periods, confidences, top_acf) = rank_cfd_results(&validated);
3301
3302    CfdAutoperiodResult {
3303        period: periods[0],
3304        confidence: confidences[0],
3305        acf_validation: top_acf,
3306        periods,
3307        confidences,
3308    }
3309}
3310
3311/// CFDAutoperiod for functional data (matrix format)
3312pub fn cfd_autoperiod_fdata(
3313    data: &FdMatrix,
3314    argvals: &[f64],
3315    cluster_tolerance: Option<f64>,
3316    min_cluster_size: Option<usize>,
3317) -> CfdAutoperiodResult {
3318    let (n, m) = data.shape();
3319    if n == 0 || m < 8 || argvals.len() != m {
3320        return CfdAutoperiodResult {
3321            period: f64::NAN,
3322            confidence: 0.0,
3323            acf_validation: 0.0,
3324            periods: Vec::new(),
3325            confidences: Vec::new(),
3326        };
3327    }
3328
3329    let mean_curve = compute_mean_curve(data);
3330    cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3331}
3332
/// Cluster periods using a simple density-based approach
///
/// `candidates` are (period, power) pairs. After sorting by period, each
/// candidate joins the current cluster when it lies within `tolerance`
/// relative difference of the cluster's running (unweighted) mean period;
/// otherwise the cluster is closed and a new one begins. Clusters with at
/// least `min_size` members are emitted as (power-weighted center, total power).
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    // Power-weighted center and total power of one finished cluster.
    // Extracted so the loop body and the post-loop flush share one copy
    // of the finalization logic.
    fn finalize(cluster: &[(f64, f64)]) -> (f64, f64) {
        let total_power: f64 = cluster.iter().map(|(_, pw)| pw).sum();
        let center =
            cluster.iter().map(|(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
        (center, total_power)
    }

    if candidates.is_empty() {
        return Vec::new();
    }

    // Sort candidates by period so clusters form over contiguous runs.
    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in sorted.iter().skip(1) {
        // Merge decision uses the unweighted mean of the member periods.
        let mean_period = current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - mean_period).abs() / mean_period.max(period);

        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            // Close out the current cluster and start a new one.
            if current.len() >= min_size {
                clusters.push(finalize(&current));
            }
            current = vec![(period, power)];
        }
    }

    // Flush the final cluster.
    if current.len() >= min_size {
        clusters.push(finalize(&current));
    }

    clusters
}
3385
3386// ============================================================================
3387// Lomb-Scargle Periodogram
3388// ============================================================================
3389
/// Result of Lomb-Scargle periodogram analysis.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Test frequencies (cycles per unit time)
    pub frequencies: Vec<f64>,
    /// Corresponding periods (1/frequency; infinity for zero frequency)
    pub periods: Vec<f64>,
    /// Normalized Lomb-Scargle power at each frequency
    pub power: Vec<f64>,
    /// Peak period (highest power)
    pub peak_period: f64,
    /// Peak frequency
    pub peak_frequency: f64,
    /// Peak power
    pub peak_power: f64,
    /// False alarm probability at peak (significance level)
    pub false_alarm_probability: f64,
    /// Significance level (1 - FAP)
    pub significance: f64,
}
3410
3411/// Compute Lomb-Scargle periodogram for irregularly sampled data.
3412///
3413/// The Lomb-Scargle periodogram is designed for unevenly-spaced time series
3414/// and reduces to the standard periodogram for evenly-spaced data.
3415///
3416/// # Algorithm
3417/// Following Scargle (1982) and Horne & Baliunas (1986):
3418/// 1. For each test frequency ω, compute the phase shift τ
3419/// 2. Compute the normalized power P(ω)
3420/// 3. Estimate false alarm probability using the exponential distribution
3421///
3422/// # Arguments
3423/// * `times` - Observation times (not necessarily evenly spaced)
3424/// * `values` - Observed values at each time
3425/// * `frequencies` - Optional frequencies to evaluate (cycles per unit time).
3426///   If None, automatically generates a frequency grid.
3427/// * `oversampling` - Oversampling factor for auto-generated frequency grid.
3428///   Default: 4.0. Higher values give finer frequency resolution.
3429/// * `nyquist_factor` - Maximum frequency as multiple of pseudo-Nyquist.
3430///   Default: 1.0.
3431///
3432/// # Returns
3433/// `LombScargleResult` with power spectrum and significance estimates.
3434///
3435/// # Example
3436/// ```rust
3437/// use fdars_core::seasonal::lomb_scargle;
3438/// use std::f64::consts::PI;
3439///
3440/// // Irregularly sampled sine wave
3441/// let times: Vec<f64> = vec![0.0, 0.3, 0.7, 1.2, 1.5, 2.1, 2.8, 3.0, 3.5, 4.0];
3442/// let period = 1.5;
3443/// let values: Vec<f64> = times.iter()
3444///     .map(|&t| (2.0 * PI * t / period).sin())
3445///     .collect();
3446///
3447/// let result = lomb_scargle(&times, &values, None, None, None);
3448/// assert!((result.peak_period - period).abs() < 0.2);
3449/// ```
3450pub fn lomb_scargle(
3451    times: &[f64],
3452    values: &[f64],
3453    frequencies: Option<&[f64]>,
3454    oversampling: Option<f64>,
3455    nyquist_factor: Option<f64>,
3456) -> LombScargleResult {
3457    let n = times.len();
3458    assert_eq!(n, values.len(), "times and values must have same length");
3459    assert!(n >= 3, "Need at least 3 data points");
3460
3461    // Compute mean and variance
3462    let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3463    let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3464
3465    // Generate frequency grid if not provided
3466    let freq_vec: Vec<f64>;
3467    let freqs = if let Some(f) = frequencies {
3468        f
3469    } else {
3470        freq_vec = generate_ls_frequencies(
3471            times,
3472            oversampling.unwrap_or(4.0),
3473            nyquist_factor.unwrap_or(1.0),
3474        );
3475        &freq_vec
3476    };
3477
3478    // Compute Lomb-Scargle power at each frequency
3479    let mut power = Vec::with_capacity(freqs.len());
3480
3481    for &freq in freqs.iter() {
3482        let omega = 2.0 * PI * freq;
3483        let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3484        power.push(p);
3485    }
3486
3487    // Find peak
3488    let (peak_idx, &peak_power) = power
3489        .iter()
3490        .enumerate()
3491        .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
3492        .unwrap_or((0, &0.0));
3493
3494    let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3495    let peak_period = if peak_frequency > 0.0 {
3496        1.0 / peak_frequency
3497    } else {
3498        f64::INFINITY
3499    };
3500
3501    // Compute false alarm probability
3502    let n_indep = estimate_independent_frequencies(times, freqs.len());
3503    let fap = lomb_scargle_fap(peak_power, n_indep, n);
3504
3505    // Compute periods from frequencies
3506    let periods: Vec<f64> = freqs
3507        .iter()
3508        .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3509        .collect();
3510
3511    LombScargleResult {
3512        frequencies: freqs.to_vec(),
3513        periods,
3514        power,
3515        peak_period,
3516        peak_frequency,
3517        peak_power,
3518        false_alarm_probability: fap,
3519        significance: 1.0 - fap,
3520    }
3521}
3522
/// Compute Lomb-Scargle power at a single frequency.
///
/// Uses the Scargle (1982) normalization. Returns 0 for non-positive
/// variance or frequency (no meaningful power).
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    // Phase shift tau chosen so the sine and cosine terms are orthogonal.
    let (sin_sum, cos_sum) = times.iter().fold((0.0f64, 0.0f64), |(s, c), &t| {
        let arg = 2.0 * omega * t;
        (s + arg.sin(), c + arg.cos())
    });
    let tau = sin_sum.atan2(cos_sum) / (2.0 * omega);

    // Accumulate the four sums of the Lomb-Scargle formula.
    let mut sc = 0.0; // Σ (y - ȳ) cos
    let mut ss = 0.0; // Σ (y - ȳ) sin
    let mut css = 0.0; // Σ cos²
    let mut sss = 0.0; // Σ sin²
    for (&t, &y) in times.iter().zip(values.iter()) {
        let centered = y - mean_y;
        let (s, c) = (omega * (t - tau)).sin_cos();
        sc += centered * c;
        ss += centered * s;
        css += c * c;
        sss += s * s;
    }

    // Guard the denominators against pathological grids, then normalize.
    0.5 * (sc * sc / css.max(1e-15) + ss * ss / sss.max(1e-15)) / var_y
}
3574
/// Generate frequency grid for Lomb-Scargle.
///
/// The grid runs from one cycle per observation span up to `nyquist_factor`
/// times the pseudo-Nyquist frequency, with `oversampling` points per
/// natural frequency bin; its size is capped at 10000 entries.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    let n = times.len();
    if n < 2 {
        return vec![0.0];
    }

    // Observation span (floored to avoid division by zero on degenerate input).
    let (mut t_min, mut t_max) = (f64::INFINITY, f64::NEG_INFINITY);
    for &t in times {
        t_min = t_min.min(t);
        t_max = t_max.max(t);
    }
    let span = (t_max - t_min).max(1e-10);

    // One cycle over the span is the lowest resolvable frequency.
    let f_min = 1.0 / span;

    // Pseudo-Nyquist from the average sampling rate, scaled by the factor.
    let f_nyquist = 0.5 * (n - 1) as f64 / span;
    let f_max = f_nyquist * nyquist_factor;

    // Oversampled frequency resolution.
    let df = f_min / oversampling;

    // Grid size, capped to bound memory use.
    let count = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);
    (0..count).map(|i| f_min + i as f64 * df).collect()
}
3608
/// Estimate number of independent frequencies (for FAP calculation).
///
/// For irregularly sampled data this is approximately the number of data
/// points (Horne & Baliunas 1986); the conservative estimate used here is
/// the smaller of the data count and the frequency-grid size.
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    times.len().min(n_freq)
}
3618
/// False alarm probability for a Lomb-Scargle peak.
///
/// Under the null hypothesis the power at a single frequency is
/// exponentially distributed, so with M = `n_indep` independent frequencies
/// FAP = 1 - (1 - exp(-z))^M.
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    // Non-positive power or no frequencies: nothing significant by definition.
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // P(single-frequency power < z) under the exponential null.
    let p_single = 1.0 - (-power).exp();

    match p_single {
        p if p >= 1.0 => 0.0, // exp(-z) underflowed: overwhelmingly significant
        p if p <= 0.0 => 1.0, // no evidence at all
        p => {
            // FAP = 1 - p^M, evaluated in log space for numerical stability.
            let log_cdf = n_indep as f64 * p.ln();
            if log_cdf < -700.0 {
                // exp() would underflow: effectively certain detection.
                0.0
            } else {
                1.0 - log_cdf.exp()
            }
        }
    }
}
3654
/// Compute Lomb-Scargle periodogram for functional data (multiple curves).
///
/// The n curves are first averaged into a single mean curve, and the
/// periodogram is computed for that mean curve.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m) of functional data: n samples
///   (rows) by m evaluation points (columns)
/// * `argvals` - Time points of length m
/// * `oversampling` - Oversampling factor. Default: 4.0
/// * `nyquist_factor` - Maximum frequency multiplier. Default: 1.0
///
/// # Returns
/// `LombScargleResult` computed from the mean curve.
pub fn lomb_scargle_fdata(
    data: &FdMatrix,
    argvals: &[f64],
    oversampling: Option<f64>,
    nyquist_factor: Option<f64>,
) -> LombScargleResult {
    // Collapse the n curves into a single mean curve of length m.
    let mean_curve = compute_mean_curve(data);

    // Run Lomb-Scargle on the mean curve. The third argument is passed as
    // None — NOTE(review): presumably optional weights/uncertainties;
    // confirm against `lomb_scargle`'s signature.
    lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
}
3682
3683// ============================================================================
3684// Matrix Profile (STOMP Algorithm)
3685// ============================================================================
3686
/// Result of Matrix Profile computation.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// The matrix profile: for each subsequence start position, the minimum
    /// z-normalized Euclidean distance to any other non-trivial subsequence
    pub profile: Vec<f64>,
    /// Index of the nearest-neighbor subsequence for each position
    pub profile_index: Vec<usize>,
    /// Subsequence (window) length used
    pub subsequence_length: usize,
    /// Candidate periods detected from arc analysis, strongest first (at most 5)
    pub detected_periods: Vec<f64>,
    /// Arc counts at each index distance (histogram used for period detection)
    pub arc_counts: Vec<usize>,
    /// Most prominent detected period (0.0 when none was found)
    pub primary_period: f64,
    /// Confidence score for the primary period: fraction of arcs captured by
    /// its peak, clamped to [0, 1]
    pub confidence: f64,
}
3705
3706/// Compute Matrix Profile using STOMP algorithm (Scalable Time series Ordered-search Matrix Profile).
3707///
3708/// The Matrix Profile is a data structure that stores the z-normalized Euclidean distance
3709/// between each subsequence of a time series and its nearest neighbor. It enables efficient
3710/// motif discovery and anomaly detection.
3711///
3712/// # Algorithm (STOMP - Zhu et al. 2016)
3713/// 1. Pre-compute sliding mean and standard deviation using cumulative sums
3714/// 2. Use FFT to compute first row of distance matrix
3715/// 3. Update subsequent rows incrementally using the dot product update rule
3716/// 4. Track minimum distance and index at each position
3717///
3718/// # Arguments
3719/// * `values` - Time series values
3720/// * `subsequence_length` - Length of subsequences to compare (window size)
3721/// * `exclusion_zone` - Fraction of subsequence length to exclude around each position
3722///   to prevent trivial self-matches. Default: 0.5
3723///
3724/// # Returns
3725/// `MatrixProfileResult` with profile, indices, and detected periods.
3726///
3727/// # Example
3728/// ```rust
3729/// use fdars_core::seasonal::matrix_profile;
3730/// use std::f64::consts::PI;
3731///
3732/// // Periodic signal
3733/// let period = 20.0;
3734/// let values: Vec<f64> = (0..100)
3735///     .map(|i| (2.0 * PI * i as f64 / period).sin())
3736///     .collect();
3737///
3738/// let result = matrix_profile(&values, Some(15), None);
3739/// assert!((result.primary_period - period).abs() < 5.0);
3740/// ```
3741pub fn matrix_profile(
3742    values: &[f64],
3743    subsequence_length: Option<usize>,
3744    exclusion_zone: Option<f64>,
3745) -> MatrixProfileResult {
3746    let n = values.len();
3747
3748    // Default subsequence length: ~ 1/4 of series length, capped at reasonable range
3749    let m = subsequence_length.unwrap_or_else(|| {
3750        let default_m = n / 4;
3751        default_m.max(4).min(n / 2)
3752    });
3753
3754    assert!(m >= 3, "Subsequence length must be at least 3");
3755    assert!(
3756        m <= n / 2,
3757        "Subsequence length must be at most half the series length"
3758    );
3759
3760    let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3761    let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3762
3763    // Number of subsequences
3764    let profile_len = n - m + 1;
3765
3766    // Compute sliding statistics
3767    let (means, stds) = compute_sliding_stats(values, m);
3768
3769    // Compute the matrix profile using STOMP
3770    let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3771
3772    // Perform arc analysis to detect periods
3773    let (arc_counts, detected_periods, primary_period, confidence) =
3774        analyze_arcs(&profile_index, profile_len, m);
3775
3776    MatrixProfileResult {
3777        profile,
3778        profile_index,
3779        subsequence_length: m,
3780        detected_periods,
3781        arc_counts,
3782        primary_period,
3783        confidence,
3784    }
3785}
3786
/// Sliding-window mean and standard deviation in O(n) via prefix sums.
///
/// Returns one (mean, std) pair per window start position; standard
/// deviations are floored at 1e-10 so callers can divide safely.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let windows = n - m + 1;
    let w = m as f64;

    // Prefix sums of x and x^2 with a leading zero entry.
    let mut ps = vec![0.0; n + 1];
    let mut ps2 = vec![0.0; n + 1];
    for (i, &x) in values.iter().enumerate() {
        ps[i + 1] = ps[i] + x;
        ps2[i + 1] = ps2[i] + x * x;
    }

    let mut means = Vec::with_capacity(windows);
    let mut stds = Vec::with_capacity(windows);

    for start in 0..windows {
        let mean = (ps[start + m] - ps[start]) / w;
        // E[x^2] - E[x]^2; clamp at zero since prefix-sum cancellation can
        // push the result slightly negative.
        let var = (ps2[start + m] - ps2[start]) / w - mean * mean;
        means.push(mean);
        stds.push(var.max(0.0).sqrt().max(1e-10));
    }

    (means, stds)
}
3823
3824/// Core STOMP algorithm implementation.
3825///
3826/// Uses FFT for the first row and incremental updates for subsequent rows.
3827fn stomp_core(
3828    values: &[f64],
3829    m: usize,
3830    means: &[f64],
3831    stds: &[f64],
3832    exclusion_radius: usize,
3833) -> (Vec<f64>, Vec<usize>) {
3834    let n = values.len();
3835    let profile_len = n - m + 1;
3836
3837    // Initialize profile with infinity and index with 0
3838    let mut profile = vec![f64::INFINITY; profile_len];
3839    let mut profile_index = vec![0usize; profile_len];
3840
3841    // Compute first row using direct computation (could use FFT for large n)
3842    // QT[0,j] = sum(T[0:m] * T[j:j+m]) for each j
3843    let mut qt = vec![0.0; profile_len];
3844
3845    // First query subsequence
3846    for j in 0..profile_len {
3847        let mut dot = 0.0;
3848        for k in 0..m {
3849            dot += values[k] * values[j + k];
3850        }
3851        qt[j] = dot;
3852    }
3853
3854    // Process first row
3855    update_profile_row(
3856        0,
3857        &qt,
3858        means,
3859        stds,
3860        m,
3861        exclusion_radius,
3862        &mut profile,
3863        &mut profile_index,
3864    );
3865
3866    // Process subsequent rows using incremental updates
3867    for i in 1..profile_len {
3868        // Update QT using the sliding dot product update
3869        // QT[i,j] = QT[i-1,j-1] - T[i-1]*T[j-1] + T[i+m-1]*T[j+m-1]
3870        let mut qt_new = vec![0.0; profile_len];
3871
3872        // First element needs direct computation
3873        let mut dot = 0.0;
3874        for k in 0..m {
3875            dot += values[i + k] * values[k];
3876        }
3877        qt_new[0] = dot;
3878
3879        // Update rest using incremental formula
3880        for j in 1..profile_len {
3881            qt_new[j] =
3882                qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3883        }
3884
3885        qt = qt_new;
3886
3887        // Update profile with this row
3888        update_profile_row(
3889            i,
3890            &qt,
3891            means,
3892            stds,
3893            m,
3894            exclusion_radius,
3895            &mut profile,
3896            &mut profile_index,
3897        );
3898    }
3899
3900    (profile, profile_index)
3901}
3902
/// Fold row `i` of the distance matrix into the running profile.
///
/// Distances are derived from the dot products `qt` through the Pearson
/// correlation identity d^2 = 2m(1 - r). Both endpoints of each pair are
/// updated, since the distance matrix is symmetric.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let w = m as f64;

    for (j, &dot) in qt.iter().enumerate().take(profile.len()) {
        // Ignore trivial matches near the diagonal.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        // Pearson correlation between subsequences i and j, clamped to a
        // valid range against round-off.
        let denom = w * stds[i] * stds[j];
        let r = if denom > 0.0 {
            ((dot - w * means[i] * means[j]) / denom).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        // z-normalized Euclidean distance.
        let dist = (2.0 * w * (1.0 - r)).max(0.0).sqrt();

        // Update position i.
        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }

        // Update position j (symmetry).
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
3950
/// Detect candidate periods from the profile index via arc counting.
///
/// Every position is linked ("arced") to its nearest neighbor; the histogram
/// of arc lengths peaks at distances corresponding to repeating structure.
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    // Histogram of |i - nearest_neighbor(i)| over all positions.
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    // Local maxima with at least 3 arcs; distances below m/2 are not
    // meaningful periods and are skipped.
    let min_period = m / 2;
    let last = arc_counts.len().saturating_sub(1);
    let mut peaks: Vec<(usize, usize)> = (min_period..last)
        .filter(|&i| {
            let c = arc_counts[i];
            c >= 3
                && c > arc_counts[i.saturating_sub(1)]
                && c > arc_counts[(i + 1).min(last)]
        })
        .map(|i| (i, arc_counts[i]))
        .collect();

    // Strongest peaks first.
    peaks.sort_by(|a, b| b.1.cmp(&a.1));

    // Up to five candidate periods.
    let detected_periods: Vec<f64> = peaks.iter().take(5).map(|&(p, _)| p as f64).collect();

    // Primary period confidence = share of arcs captured by the top peak.
    let (primary_period, confidence) = match peaks.first() {
        Some(&(period, count)) => {
            let total: usize = arc_counts[min_period..].iter().sum();
            let conf = if total > 0 {
                (count as f64 / total as f64).min(1.0)
            } else {
                0.0
            };
            (period as f64, conf)
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
4008
/// Compute Matrix Profile for functional data (multiple curves).
///
/// The n curves are first averaged into a single mean curve, and the matrix
/// profile is computed for that mean curve.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m) of functional data: n samples
///   (rows) by m evaluation points (columns)
/// * `subsequence_length` - Length of subsequences. If None, automatically determined.
/// * `exclusion_zone` - Exclusion zone fraction. Default: 0.5
///
/// # Returns
/// `MatrixProfileResult` computed from the mean curve.
pub fn matrix_profile_fdata(
    data: &FdMatrix,
    subsequence_length: Option<usize>,
    exclusion_zone: Option<f64>,
) -> MatrixProfileResult {
    // Collapse the n curves into a single mean curve of length m.
    let mean_curve = compute_mean_curve(data);

    // Run matrix profile on mean curve
    matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
}
4033
4034/// Detect seasonality using Matrix Profile analysis.
4035///
4036/// Returns true if significant periodicity is detected based on matrix profile analysis.
4037///
4038/// # Arguments
4039/// * `values` - Time series values
4040/// * `subsequence_length` - Length of subsequences to compare
4041/// * `confidence_threshold` - Minimum confidence for positive detection. Default: 0.1
4042///
4043/// # Returns
4044/// Tuple of (is_seasonal, detected_period, confidence)
4045pub fn matrix_profile_seasonality(
4046    values: &[f64],
4047    subsequence_length: Option<usize>,
4048    confidence_threshold: Option<f64>,
4049) -> (bool, f64, f64) {
4050    let result = matrix_profile(values, subsequence_length, None);
4051
4052    let threshold = confidence_threshold.unwrap_or(0.1);
4053    let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
4054
4055    (is_seasonal, result.primary_period, result.confidence)
4056}
4057
4058// ============================================================================
4059// Singular Spectrum Analysis (SSA)
4060// ============================================================================
4061
/// Result of Singular Spectrum Analysis.
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component (same length as the input series)
    pub trend: Vec<f64>,
    /// Reconstructed seasonal/periodic component
    pub seasonal: Vec<f64>,
    /// Noise/residual component: input minus trend minus seasonal
    pub noise: Vec<f64>,
    /// Singular values from SVD (sorted descending), truncated to `n_components`
    pub singular_values: Vec<f64>,
    /// Contribution of each retained component (proportion of total variance)
    pub contributions: Vec<f64>,
    /// Window length used for embedding
    pub window_length: usize,
    /// Number of components extracted
    pub n_components: usize,
    /// Detected period in samples (0.0 if no significant periodicity found)
    pub detected_period: f64,
    /// Confidence score for the detected period (singular value relative to
    /// the largest one)
    pub confidence: f64,
}
4084
/// Singular Spectrum Analysis (SSA) for time series decomposition.
///
/// SSA is a model-free, non-parametric method for decomposing a time series
/// into trend, oscillatory (seasonal), and noise components using singular
/// value decomposition of the trajectory matrix.
///
/// # Algorithm
/// 1. **Embedding**: Convert series into trajectory matrix using sliding windows
/// 2. **Decomposition**: SVD of trajectory matrix
/// 3. **Grouping**: Identify trend vs. periodic vs. noise components
/// 4. **Reconstruction**: Diagonal averaging to recover time series
///
/// # Arguments
/// * `values` - Time series values
/// * `window_length` - Embedding window length (L). If None, uses L = min(n/2, 50).
///   Larger values capture longer-term patterns but need longer series.
/// * `n_components` - Number of components to extract. If None, uses 10.
/// * `trend_components` - Indices of components for trend (0-based). If None, auto-detect.
/// * `seasonal_components` - Indices of components for seasonal. If None, auto-detect.
///
/// # Returns
/// `SsaResult` with decomposed components and diagnostics. For degenerate
/// input (fewer than 4 points, or an unusable window) the input is returned
/// as pure trend with zero seasonal and noise components and no diagnostics.
///
/// # Example
/// ```rust
/// use fdars_core::seasonal::ssa;
/// use std::f64::consts::PI;
///
/// // Signal with trend + seasonal + noise
/// let n = 100;
/// let values: Vec<f64> = (0..n)
///     .map(|i| {
///         let t = i as f64;
///         0.01 * t + (2.0 * PI * t / 12.0).sin() + 0.1 * (i as f64 * 0.1).sin()
///     })
///     .collect();
///
/// let result = ssa(&values, None, None, None, None);
/// assert!(result.detected_period > 0.0);
/// ```
pub fn ssa(
    values: &[f64],
    window_length: Option<usize>,
    n_components: Option<usize>,
    trend_components: Option<&[usize]>,
    seasonal_components: Option<&[usize]>,
) -> SsaResult {
    let n = values.len();

    // Default window length: min(n/2, 50), but never below 2
    let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));

    // Degenerate input: series too short or window unusable — return the
    // series as pure trend rather than panicking.
    if n < 4 || l < 2 || l > n / 2 {
        return SsaResult {
            trend: values.to_vec(),
            seasonal: vec![0.0; n],
            noise: vec![0.0; n],
            singular_values: vec![],
            contributions: vec![],
            window_length: l,
            n_components: 0,
            detected_period: 0.0,
            confidence: 0.0,
        };
    }

    // Number of columns in trajectory matrix
    let k = n - l + 1;

    // Step 1: Embedding - create trajectory matrix (L x K)
    let trajectory = embed_trajectory(values, l, k);

    // Step 2: SVD decomposition
    let (u, sigma, vt) = svd_decompose(&trajectory, l, k);

    // Determine number of components to use, capped by the available rank
    let max_components = sigma.len();
    let n_comp = n_components.unwrap_or(10).min(max_components);

    // Compute contributions (proportion of total variance); each squared
    // singular value measures the variance its component captures.
    let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
    let contributions: Vec<f64> = sigma
        .iter()
        .take(n_comp)
        .map(|&s| s * s / total_var.max(1e-15))
        .collect();

    // Step 3: Grouping - identify trend and seasonal components.
    // Any explicitly provided grouping disables auto-detection entirely,
    // in which case no period/confidence is reported.
    let (trend_idx, seasonal_idx, detected_period, confidence) =
        if trend_components.is_some() || seasonal_components.is_some() {
            // Use provided groupings
            let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
            let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
            (t_idx, s_idx, 0.0, 0.0)
        } else {
            // Auto-detect groupings
            auto_group_ssa_components(&u, &sigma, l, k, n_comp)
        };

    // Step 4: Reconstruction via diagonal averaging
    let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
    let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);

    // Noise is the remainder after removing trend and seasonal
    let noise: Vec<f64> = values
        .iter()
        .zip(trend.iter())
        .zip(seasonal.iter())
        .map(|((&y, &t), &s)| y - t - s)
        .collect();

    SsaResult {
        trend,
        seasonal,
        noise,
        singular_values: sigma.into_iter().take(n_comp).collect(),
        contributions,
        window_length: l,
        n_components: n_comp,
        detected_period,
        confidence,
    }
}
4208
/// Build the L x K trajectory (Hankel) matrix, stored column-major.
///
/// Column j holds the sliding window `values[j..j + l]`.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    let mut trajectory = Vec::with_capacity(l * k);
    for j in 0..k {
        // Column j is a contiguous slice of the input.
        trajectory.extend_from_slice(&values[j..j + l]);
    }
    trajectory
}
4222
4223/// SVD decomposition of trajectory matrix using nalgebra.
4224fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
4225    use nalgebra::{DMatrix, SVD};
4226
4227    // Create nalgebra matrix (column-major)
4228    let mat = DMatrix::from_column_slice(l, k, trajectory);
4229
4230    // Compute SVD
4231    let svd = SVD::new(mat, true, true);
4232
4233    // Extract components (SVD::new with compute_u/v=true always produces both,
4234    // but handle gracefully in case of degenerate input)
4235    let u_mat = match svd.u {
4236        Some(u) => u,
4237        None => return (vec![], vec![], vec![]),
4238    };
4239    let vt_mat = match svd.v_t {
4240        Some(vt) => vt,
4241        None => return (vec![], vec![], vec![]),
4242    };
4243    let sigma = svd.singular_values;
4244
4245    // Convert to flat vectors
4246    let u: Vec<f64> = u_mat.iter().cloned().collect();
4247    let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
4248    let vt: Vec<f64> = vt_mat.iter().cloned().collect();
4249
4250    (u, sigma_vec, vt)
4251}
4252
/// Classification of a single SSA eigen-component.
enum SsaComponentKind {
    /// Slowly-varying component with few sign changes.
    Trend,
    /// Oscillatory component; the payload is the detected period in samples.
    Seasonal(f64),
    /// Neither trend-like nor periodic: treated as noise.
    Noise,
}
4258
4259/// Classify an SSA component as trend, seasonal, or noise.
4260fn classify_ssa_component(u_col: &[f64], trend_count: usize) -> SsaComponentKind {
4261    if is_trend_component(u_col) && trend_count < 2 {
4262        SsaComponentKind::Trend
4263    } else {
4264        let (is_periodic, period) = is_periodic_component(u_col);
4265        if is_periodic {
4266            SsaComponentKind::Seasonal(period)
4267        } else {
4268            SsaComponentKind::Noise
4269        }
4270    }
4271}
4272
/// Apply fallback groupings when auto-detection found nothing.
///
/// If no trend component was identified, component 0 (largest singular
/// value) is used as trend. If no seasonal components were identified and at
/// least three components exist, components 1 and 2 are paired as seasonal.
///
/// Note: the previous nested `if n_comp > 2` was always true under the
/// enclosing `n_comp >= 3` guard (dead condition) and has been flattened;
/// behavior is unchanged.
fn apply_ssa_grouping_defaults(
    trend_idx: &mut Vec<usize>,
    seasonal_idx: &mut Vec<usize>,
    n_comp: usize,
) {
    if trend_idx.is_empty() && n_comp > 0 {
        trend_idx.push(0);
    }
    if seasonal_idx.is_empty() && n_comp >= 3 {
        // n_comp >= 3 guarantees indices 1 and 2 both exist.
        seasonal_idx.extend([1, 2]);
    }
}
4289
4290/// Auto-detect trend and seasonal component groupings.
4291fn auto_group_ssa_components(
4292    u: &[f64],
4293    sigma: &[f64],
4294    l: usize,
4295    _k: usize,
4296    n_comp: usize,
4297) -> (Vec<usize>, Vec<usize>, f64, f64) {
4298    let mut trend_idx = Vec::new();
4299    let mut seasonal_idx = Vec::new();
4300    let mut detected_period = 0.0;
4301    let mut confidence = 0.0;
4302
4303    for i in 0..n_comp.min(sigma.len()) {
4304        let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4305        match classify_ssa_component(&u_col, trend_idx.len()) {
4306            SsaComponentKind::Trend => trend_idx.push(i),
4307            SsaComponentKind::Seasonal(period) => {
4308                seasonal_idx.push(i);
4309                if detected_period == 0.0 && period > 0.0 {
4310                    detected_period = period;
4311                    confidence = sigma[i] / sigma[0].max(1e-15);
4312                }
4313            }
4314            SsaComponentKind::Noise => {}
4315        }
4316    }
4317
4318    apply_ssa_grouping_defaults(&mut trend_idx, &mut seasonal_idx, n_comp);
4319    (trend_idx, seasonal_idx, detected_period, confidence)
4320}
4321
/// Heuristic trend test for a singular vector: very few sign changes.
fn is_trend_component(u_col: &[f64]) -> bool {
    let n = u_col.len();
    if n < 3 {
        return false;
    }

    // Count zero crossings between adjacent elements.
    let sign_changes = u_col
        .windows(2)
        .filter(|pair| pair[0] * pair[1] < 0.0)
        .count();

    // Trend-like vectors cross zero at most roughly once per ten samples.
    sign_changes <= n / 10
}
4340
/// Test whether a singular vector is oscillatory via autocorrelation.
///
/// Returns `(true, period)` when some lag in [2, n/2) achieves an
/// autocorrelation above 0.3; the reported period is the strongest such lag.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    // Center the vector before autocorrelating.
    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();

    // Zero-lag energy; a flat vector cannot be periodic.
    let energy: f64 = centered.iter().map(|&x| x * x).sum();
    if energy < 1e-15 {
        return (false, 0.0);
    }

    // Scan lags for the strongest autocorrelation above the 0.3 threshold.
    let mut best_acf = 0.0f64;
    let mut best_period = 0.0f64;
    for lag in 2..n / 2 {
        let acf = centered[..n - lag]
            .iter()
            .zip(&centered[lag..])
            .map(|(a, b)| a * b)
            .sum::<f64>()
            / energy;

        if acf > best_acf && acf > 0.3 {
            best_acf = acf;
            best_period = lag as f64;
        }
    }

    (best_acf > 0.3 && best_period > 0.0, best_period)
}
4377
/// Reconstruct time series from grouped components via diagonal averaging.
///
/// Sums the rank-1 matrices `sigma[idx] * u_idx * v_idx^T` over every index
/// in `group_idx`, then converts the L x K result back to a length-`n`
/// series by anti-diagonal averaging (Hankelization).
///
/// Out-of-range component indices are skipped; an empty group yields zeros.
fn reconstruct_grouped(
    u: &[f64],
    sigma: &[f64],
    vt: &[f64],
    l: usize,
    k: usize,
    n: usize,
    group_idx: &[usize],
) -> Vec<f64> {
    if group_idx.is_empty() {
        return vec![0.0; n];
    }

    // Sum of rank-1 matrices for this group (L x K, column-major)
    let mut grouped_matrix = vec![0.0; l * k];

    for &idx in group_idx {
        // Skip indices beyond the available singular values.
        if idx >= sigma.len() {
            continue;
        }

        let s = sigma[idx];

        // Add s * u_i * v_i^T
        for j in 0..k {
            for i in 0..l {
                // u is column-major with L rows: row i of column `idx`.
                let u_val = u[i + idx * l];
                // NOTE(review): this indexing assumes v_t is a
                // min(L, K) x K column-major matrix with
                // sigma.len() == min(L, K), so element (idx, j) lives at
                // idx + j * min(L, K) and the extra `.min(l)` is a defensive
                // no-op — confirm against `svd_decompose`'s nalgebra layout.
                let v_val = vt[idx + j * sigma.len().min(l)]; // v_t is stored as K x min(L,K)
                grouped_matrix[i + j * l] += s * u_val * v_val;
            }
        }
    }

    // Diagonal averaging (Hankelization)
    diagonal_average(&grouped_matrix, l, k, n)
}
4415
/// Anti-diagonal averaging: trajectory matrix back to a time series.
///
/// Entry (i, j) of the L x K column-major matrix contributes to series
/// position i + j; each position is the mean of all its contributions.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut sums = vec![0.0; n];
    let mut hits = vec![0usize; n];

    // Accumulate contributions along anti-diagonals.
    for j in 0..k {
        let col = &matrix[j * l..(j + 1) * l];
        for (i, &v) in col.iter().enumerate() {
            let pos = i + j;
            if pos < n {
                sums[pos] += v;
                hits[pos] += 1;
            }
        }
    }

    // Positions never touched keep their zero sum.
    sums.iter()
        .zip(hits.iter())
        .map(|(&s, &c)| if c > 0 { s / c as f64 } else { s })
        .collect()
}
4441
/// Compute SSA for functional data (multiple curves).
///
/// The n curves are first averaged into a single mean curve, and SSA with
/// automatic component grouping is run on that mean curve.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m) of functional data: n samples
///   (rows) by m evaluation points (columns)
/// * `window_length` - SSA window length. If None, auto-determined.
/// * `n_components` - Number of SSA components. Default: 10.
///
/// # Returns
/// `SsaResult` computed from the mean curve.
pub fn ssa_fdata(
    data: &FdMatrix,
    window_length: Option<usize>,
    n_components: Option<usize>,
) -> SsaResult {
    // Collapse the n curves into a single mean curve of length m.
    let mean_curve = compute_mean_curve(data);

    // Run SSA on the mean curve with auto-detected trend/seasonal groupings.
    ssa(&mean_curve, window_length, n_components, None, None)
}
4464
4465/// Detect seasonality using SSA.
4466///
4467/// # Arguments
4468/// * `values` - Time series values
4469/// * `window_length` - SSA window length
4470/// * `confidence_threshold` - Minimum confidence for positive detection
4471///
4472/// # Returns
4473/// Tuple of (is_seasonal, detected_period, confidence)
4474pub fn ssa_seasonality(
4475    values: &[f64],
4476    window_length: Option<usize>,
4477    confidence_threshold: Option<f64>,
4478) -> (bool, f64, f64) {
4479    let result = ssa(values, window_length, None, None, None);
4480
4481    let threshold = confidence_threshold.unwrap_or(0.1);
4482    let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4483
4484    (is_seasonal, result.detected_period, result.confidence)
4485}
4486
4487#[cfg(test)]
4488mod tests {
4489    use super::*;
4490    use std::f64::consts::PI;
4491
4492    fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> FdMatrix {
4493        let mut data = vec![0.0; n * m];
4494        for i in 0..n {
4495            for j in 0..m {
4496                data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4497            }
4498        }
4499        FdMatrix::from_column_major(data, n, m).unwrap()
4500    }
4501
4502    #[test]
4503    fn test_period_estimation_fft() {
4504        let m = 200;
4505        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4506        let period = 2.0;
4507        let data = generate_sine(1, m, period, &argvals);
4508
4509        let estimate = estimate_period_fft(&data, &argvals);
4510        assert!((estimate.period - period).abs() < 0.2);
4511        assert!(estimate.confidence > 1.0);
4512    }
4513
4514    #[test]
4515    fn test_peak_detection() {
4516        let m = 100;
4517        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4518        let period = 2.0;
4519        let data = generate_sine(1, m, period, &argvals);
4520
4521        let result = detect_peaks(&data, &argvals, Some(1.5), None, false, None);
4522
4523        // Should find approximately 5 peaks (10 / 2)
4524        assert!(!result.peaks[0].is_empty());
4525        assert!((result.mean_period - period).abs() < 0.3);
4526    }
4527
    #[test]
    fn test_peak_detection_known_sine() {
        // Pure sine wave: sin(2*pi*t/2) on [0, 10]
        // Peaks occur at t = period/4 + k*period = 0.5, 2.5, 4.5, 6.5, 8.5
        let m = 200; // High resolution for accurate detection
        let period = 2.0;
        // Evenly spaced grid covering [0, 10] inclusive (step ~0.05).
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();
        // Single curve (1 x m) functional data object.
        let data = FdMatrix::from_column_major(data, 1, m).unwrap();

        let result = detect_peaks(&data, &argvals, None, None, false, None);

        // Should find exactly 5 peaks
        assert_eq!(
            result.peaks[0].len(),
            5,
            "Expected 5 peaks, got {}. Peak times: {:?}",
            result.peaks[0].len(),
            result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
        );

        // Check peak locations are close to expected (tolerance ~3 grid steps)
        let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
        for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
            assert!(
                (peak.time - expected).abs() < 0.15,
                "Peak at {:.3} not close to expected {:.3}",
                peak.time,
                expected
            );
        }

        // Check mean period
        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4571
4572    #[test]
4573    fn test_peak_detection_with_min_distance() {
4574        // Same sine wave but with min_distance constraint
4575        let m = 200;
4576        let period = 2.0;
4577        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4578        let data: Vec<f64> = argvals
4579            .iter()
4580            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4581            .collect();
4582        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4583
4584        // min_distance = 1.5 should still find all 5 peaks (spacing = 2.0)
4585        let result = detect_peaks(&data, &argvals, Some(1.5), None, false, None);
4586        assert_eq!(
4587            result.peaks[0].len(),
4588            5,
4589            "With min_distance=1.5, expected 5 peaks, got {}",
4590            result.peaks[0].len()
4591        );
4592
4593        // min_distance = 2.5 should find fewer peaks
4594        let result2 = detect_peaks(&data, &argvals, Some(2.5), None, false, None);
4595        assert!(
4596            result2.peaks[0].len() < 5,
4597            "With min_distance=2.5, expected fewer than 5 peaks, got {}",
4598            result2.peaks[0].len()
4599        );
4600    }
4601
4602    #[test]
4603    fn test_peak_detection_period_1() {
4604        // Higher frequency: sin(2*pi*t/1) on [0, 10]
4605        // Peaks at t = 0.25, 1.25, 2.25, ..., 9.25 (10 peaks)
4606        let m = 400; // Higher resolution for higher frequency
4607        let period = 1.0;
4608        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4609        let data: Vec<f64> = argvals
4610            .iter()
4611            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4612            .collect();
4613        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4614
4615        let result = detect_peaks(&data, &argvals, None, None, false, None);
4616
4617        // Should find 10 peaks
4618        assert_eq!(
4619            result.peaks[0].len(),
4620            10,
4621            "Expected 10 peaks, got {}",
4622            result.peaks[0].len()
4623        );
4624
4625        // Check mean period
4626        assert!(
4627            (result.mean_period - period).abs() < 0.1,
4628            "Mean period {:.3} not close to expected {:.3}",
4629            result.mean_period,
4630            period
4631        );
4632    }
4633
4634    #[test]
4635    fn test_peak_detection_shifted_sine() {
4636        // Shifted sine: sin(2*pi*t/2) + 1 on [0, 10]
4637        // Same peak locations, just shifted up
4638        let m = 200;
4639        let period = 2.0;
4640        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4641        let data: Vec<f64> = argvals
4642            .iter()
4643            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
4644            .collect();
4645        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4646
4647        let result = detect_peaks(&data, &argvals, None, None, false, None);
4648
4649        // Should still find 5 peaks
4650        assert_eq!(
4651            result.peaks[0].len(),
4652            5,
4653            "Expected 5 peaks for shifted sine, got {}",
4654            result.peaks[0].len()
4655        );
4656
4657        // Peak values should be around 2.0 (max of sin + 1)
4658        for peak in &result.peaks[0] {
4659            assert!(
4660                (peak.value - 2.0).abs() < 0.05,
4661                "Peak value {:.3} not close to expected 2.0",
4662                peak.value
4663            );
4664        }
4665    }
4666
4667    #[test]
4668    fn test_peak_detection_prominence() {
4669        // Create signal with peaks of different heights
4670        // Large peaks at odd positions, small peaks at even positions
4671        let m = 200;
4672        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4673        let data: Vec<f64> = argvals
4674            .iter()
4675            .map(|&t| {
4676                let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
4677                // Add small ripples
4678                let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
4679                base + ripple
4680            })
4681            .collect();
4682        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4683
4684        // Without prominence filter, may find extra peaks from ripples
4685        let result_no_filter = detect_peaks(&data, &argvals, None, None, false, None);
4686
4687        // With prominence filter, should only find major peaks
4688        let result_filtered = detect_peaks(&data, &argvals, None, Some(0.5), false, None);
4689
4690        // Filtered should have fewer or equal peaks
4691        assert!(
4692            result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
4693            "Prominence filter should reduce peak count"
4694        );
4695    }
4696
4697    #[test]
4698    fn test_peak_detection_different_amplitudes() {
4699        // Test with various amplitudes: 0.5, 1.0, 2.0, 5.0
4700        let m = 200;
4701        let period = 2.0;
4702        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4703
4704        for amplitude in [0.5, 1.0, 2.0, 5.0] {
4705            let data: Vec<f64> = argvals
4706                .iter()
4707                .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
4708                .collect();
4709            let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4710
4711            let result = detect_peaks(&data, &argvals, None, None, false, None);
4712
4713            assert_eq!(
4714                result.peaks[0].len(),
4715                5,
4716                "Amplitude {} should still find 5 peaks",
4717                amplitude
4718            );
4719
4720            // Peak values should be close to amplitude
4721            for peak in &result.peaks[0] {
4722                assert!(
4723                    (peak.value - amplitude).abs() < 0.1,
4724                    "Peak value {:.3} should be close to amplitude {}",
4725                    peak.value,
4726                    amplitude
4727                );
4728            }
4729        }
4730    }
4731
4732    #[test]
4733    fn test_peak_detection_varying_frequency() {
4734        // Signal with varying frequency: chirp-like signal
4735        // Peaks get closer together over time
4736        let m = 400;
4737        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4738
4739        // Frequency increases linearly: f(t) = 0.5 + 0.1*t
4740        // Phase integral: phi(t) = 2*pi * (0.5*t + 0.05*t^2)
4741        let data: Vec<f64> = argvals
4742            .iter()
4743            .map(|&t| {
4744                let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4745                phase.sin()
4746            })
4747            .collect();
4748        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4749
4750        let result = detect_peaks(&data, &argvals, None, None, false, None);
4751
4752        // Should find multiple peaks with decreasing spacing
4753        assert!(
4754            result.peaks[0].len() >= 5,
4755            "Should find at least 5 peaks, got {}",
4756            result.peaks[0].len()
4757        );
4758
4759        // Verify inter-peak distances decrease over time
4760        let distances = &result.inter_peak_distances[0];
4761        if distances.len() >= 3 {
4762            // Later peaks should be closer than earlier peaks
4763            let early_avg = (distances[0] + distances[1]) / 2.0;
4764            let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4765            assert!(
4766                late_avg < early_avg,
4767                "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4768                early_avg,
4769                late_avg
4770            );
4771        }
4772    }
4773
4774    #[test]
4775    fn test_peak_detection_sum_of_sines() {
4776        // Sum of two sine waves with different periods creates non-uniform peak spacing
4777        // y = sin(2*pi*t/2) + 0.5*sin(2*pi*t/3)
4778        let m = 300;
4779        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4780
4781        let data: Vec<f64> = argvals
4782            .iter()
4783            .map(|&t| {
4784                (2.0 * std::f64::consts::PI * t / 2.0).sin()
4785                    + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4786            })
4787            .collect();
4788        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4789
4790        let result = detect_peaks(&data, &argvals, Some(1.0), None, false, None);
4791
4792        // Should find peaks (exact count depends on interference pattern)
4793        assert!(
4794            result.peaks[0].len() >= 4,
4795            "Should find at least 4 peaks, got {}",
4796            result.peaks[0].len()
4797        );
4798
4799        // Inter-peak distances should vary
4800        let distances = &result.inter_peak_distances[0];
4801        if distances.len() >= 2 {
4802            let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4803            let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4804            assert!(
4805                max_dist > min_dist * 1.1,
4806                "Distances should vary: min={:.3}, max={:.3}",
4807                min_dist,
4808                max_dist
4809            );
4810        }
4811    }
4812
4813    #[test]
4814    fn test_seasonal_strength() {
4815        let m = 200;
4816        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4817        let period = 2.0;
4818        let data = generate_sine(1, m, period, &argvals);
4819
4820        let strength = seasonal_strength_variance(&data, &argvals, period, 3);
4821        // Pure sine should have high seasonal strength
4822        assert!(strength > 0.8);
4823
4824        let strength_spectral = seasonal_strength_spectral(&data, &argvals, period);
4825        assert!(strength_spectral > 0.5);
4826    }
4827
4828    #[test]
4829    fn test_instantaneous_period() {
4830        let m = 200;
4831        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4832        let period = 2.0;
4833        let data = generate_sine(1, m, period, &argvals);
4834
4835        let result = instantaneous_period(&data, &argvals);
4836
4837        // Check that instantaneous period is close to true period (away from boundaries)
4838        let mid_period = result.period[m / 2];
4839        assert!(
4840            (mid_period - period).abs() < 0.5,
4841            "Expected period ~{}, got {}",
4842            period,
4843            mid_period
4844        );
4845    }
4846
4847    #[test]
4848    fn test_peak_timing_analysis() {
4849        // Generate 5 cycles of sine with period 2
4850        let m = 500;
4851        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4852        let period = 2.0;
4853        let data = generate_sine(1, m, period, &argvals);
4854
4855        let result = analyze_peak_timing(&data, &argvals, period, Some(11));
4856
4857        // Should find approximately 5 peaks
4858        assert!(!result.peak_times.is_empty());
4859        // Normalized timing should be around 0.25 (peak of sin at π/2)
4860        assert!(result.mean_timing.is_finite());
4861        // Pure sine should have low timing variability
4862        assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4863    }
4864
4865    #[test]
4866    fn test_seasonality_classification() {
4867        let m = 400;
4868        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4869        let period = 2.0;
4870        let data = generate_sine(1, m, period, &argvals);
4871
4872        let result = classify_seasonality(&data, &argvals, period, None, None);
4873
4874        assert!(result.is_seasonal);
4875        assert!(result.seasonal_strength > 0.5);
4876        assert!(matches!(
4877            result.classification,
4878            SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4879        ));
4880    }
4881
4882    #[test]
4883    fn test_otsu_threshold() {
4884        // Bimodal distribution: mix of low (0.1-0.2) and high (0.7-0.9) values
4885        let values = vec![
4886            0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4887        ];
4888
4889        let threshold = otsu_threshold(&values);
4890
4891        // Threshold should be between the two modes
4892        // Due to small sample size, Otsu's method may not find optimal threshold
4893        // Just verify it returns a reasonable value in the data range
4894        assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4895        assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4896    }
4897
4898    #[test]
4899    fn test_gcv_fourier_nbasis_selection() {
4900        let m = 100;
4901        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4902
4903        // Noisy sine wave
4904        let mut data = vec![0.0; m];
4905        for j in 0..m {
4906            data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4907        }
4908
4909        let data_mat = crate::matrix::FdMatrix::from_column_major(data, 1, m).unwrap();
4910        let nbasis = crate::basis::select_fourier_nbasis_gcv(&data_mat, &argvals, 5, 25);
4911
4912        // nbasis should be reasonable (between min and max)
4913        assert!(nbasis >= 5);
4914        assert!(nbasis <= 25);
4915    }
4916
4917    #[test]
4918    fn test_detect_multiple_periods() {
4919        let m = 400;
4920        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); // 0 to 20
4921
4922        // Signal with two periods: 2 and 7
4923        let period1 = 2.0;
4924        let period2 = 7.0;
4925        let mut data = vec![0.0; m];
4926        for j in 0..m {
4927            data[j] = (2.0 * PI * argvals[j] / period1).sin()
4928                + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
4929        }
4930
4931        // Use higher min_strength threshold to properly stop after real periods
4932        let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);
4933
4934        // Should detect exactly 2 periods with these thresholds
4935        assert!(
4936            detected.len() >= 2,
4937            "Expected at least 2 periods, found {}",
4938            detected.len()
4939        );
4940
4941        // Check that both periods were detected (order depends on amplitude)
4942        let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
4943        let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
4944        let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);
4945
4946        assert!(
4947            has_period1,
4948            "Expected to find period ~{}, got {:?}",
4949            period1, periods
4950        );
4951        assert!(
4952            has_period2,
4953            "Expected to find period ~{}, got {:?}",
4954            period2, periods
4955        );
4956
4957        // Verify first detected has higher amplitude (amplitude 1.0 vs 0.6)
4958        assert!(
4959            detected[0].amplitude > detected[1].amplitude,
4960            "First detected should have higher amplitude"
4961        );
4962
4963        // Each detected period should have strength and confidence info
4964        for d in &detected {
4965            assert!(
4966                d.strength > 0.0,
4967                "Detected period should have positive strength"
4968            );
4969            assert!(
4970                d.confidence > 0.0,
4971                "Detected period should have positive confidence"
4972            );
4973            assert!(
4974                d.amplitude > 0.0,
4975                "Detected period should have positive amplitude"
4976            );
4977        }
4978    }
4979
4980    // ========================================================================
4981    // Amplitude Modulation Detection Tests
4982    // ========================================================================
4983
4984    #[test]
4985    fn test_amplitude_modulation_stable() {
4986        // Constant amplitude seasonal signal - should detect as Stable
4987        let m = 200;
4988        let period = 0.2;
4989        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
4990
4991        // Constant amplitude sine wave
4992        let data: Vec<f64> = argvals
4993            .iter()
4994            .map(|&t| (2.0 * PI * t / period).sin())
4995            .collect();
4996        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
4997
4998        let result = detect_amplitude_modulation(
4999            &data, &argvals, period, 0.15, // modulation threshold
5000            0.3,  // seasonality threshold
5001        );
5002
5003        eprintln!(
5004            "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5005            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5006        );
5007
5008        assert!(result.is_seasonal, "Should detect seasonality");
5009        assert!(
5010            !result.has_modulation,
5011            "Constant amplitude should not have modulation, got score={:.4}",
5012            result.modulation_score
5013        );
5014        assert_eq!(
5015            result.modulation_type,
5016            ModulationType::Stable,
5017            "Should be classified as Stable"
5018        );
5019    }
5020
5021    #[test]
5022    fn test_amplitude_modulation_emerging() {
5023        // Amplitude increases over time (emerging seasonality)
5024        let m = 200;
5025        let period = 0.2;
5026        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5027
5028        // Amplitude grows from 0.2 to 1.0
5029        let data: Vec<f64> = argvals
5030            .iter()
5031            .map(|&t| {
5032                let amplitude = 0.2 + 0.8 * t; // Linear increase
5033                amplitude * (2.0 * PI * t / period).sin()
5034            })
5035            .collect();
5036
5037        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5038
5039        let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5040
5041        eprintln!(
5042            "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5043            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5044        );
5045
5046        assert!(result.is_seasonal, "Should detect seasonality");
5047        assert!(
5048            result.has_modulation,
5049            "Growing amplitude should have modulation, score={:.4}",
5050            result.modulation_score
5051        );
5052        assert_eq!(
5053            result.modulation_type,
5054            ModulationType::Emerging,
5055            "Should be classified as Emerging, trend={:.4}",
5056            result.amplitude_trend
5057        );
5058        assert!(
5059            result.amplitude_trend > 0.0,
5060            "Trend should be positive for emerging"
5061        );
5062    }
5063
5064    #[test]
5065    fn test_amplitude_modulation_fading() {
5066        // Amplitude decreases over time (fading seasonality)
5067        let m = 200;
5068        let period = 0.2;
5069        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5070
5071        // Amplitude decreases from 1.0 to 0.2
5072        let data: Vec<f64> = argvals
5073            .iter()
5074            .map(|&t| {
5075                let amplitude = 1.0 - 0.8 * t; // Linear decrease
5076                amplitude * (2.0 * PI * t / period).sin()
5077            })
5078            .collect();
5079
5080        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5081
5082        let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5083
5084        eprintln!(
5085            "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5086            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5087        );
5088
5089        assert!(result.is_seasonal, "Should detect seasonality");
5090        assert!(
5091            result.has_modulation,
5092            "Fading amplitude should have modulation"
5093        );
5094        assert_eq!(
5095            result.modulation_type,
5096            ModulationType::Fading,
5097            "Should be classified as Fading, trend={:.4}",
5098            result.amplitude_trend
5099        );
5100        assert!(
5101            result.amplitude_trend < 0.0,
5102            "Trend should be negative for fading"
5103        );
5104    }
5105
5106    #[test]
5107    fn test_amplitude_modulation_oscillating() {
5108        // Amplitude oscillates (neither purely emerging nor fading)
5109        let m = 200;
5110        let period = 0.1;
5111        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5112
5113        // Amplitude oscillates: high-low-high-low pattern
5114        let data: Vec<f64> = argvals
5115            .iter()
5116            .map(|&t| {
5117                let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); // 2 modulation cycles
5118                amplitude * (2.0 * PI * t / period).sin()
5119            })
5120            .collect();
5121
5122        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5123
5124        let result = detect_amplitude_modulation(&data, &argvals, period, 0.15, 0.2);
5125
5126        eprintln!(
5127            "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5128            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5129        );
5130
5131        assert!(result.is_seasonal, "Should detect seasonality");
5132        // Oscillating has high variation but near-zero trend
5133        if result.has_modulation {
5134            // Trend should be near zero for oscillating
5135            assert!(
5136                result.amplitude_trend.abs() < 0.5,
5137                "Trend should be small for oscillating"
5138            );
5139        }
5140    }
5141
5142    #[test]
5143    fn test_amplitude_modulation_non_seasonal() {
5144        // Pure noise - no seasonality
5145        let m = 100;
5146        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5147
5148        // Random noise (use simple pseudo-random)
5149        let data: Vec<f64> = (0..m)
5150            .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5151            .collect();
5152
5153        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5154
5155        let result = detect_amplitude_modulation(
5156            &data, &argvals, 0.2, // arbitrary period
5157            0.15, 0.3,
5158        );
5159
5160        assert!(
5161            !result.is_seasonal,
5162            "Noise should not be detected as seasonal"
5163        );
5164        assert_eq!(
5165            result.modulation_type,
5166            ModulationType::NonSeasonal,
5167            "Should be classified as NonSeasonal"
5168        );
5169    }
5170
5171    // ========================================================================
5172    // Wavelet-based Amplitude Modulation Detection Tests
5173    // ========================================================================
5174
5175    #[test]
5176    fn test_wavelet_amplitude_modulation_stable() {
5177        let m = 200;
5178        let period = 0.2;
5179        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5180
5181        let data: Vec<f64> = argvals
5182            .iter()
5183            .map(|&t| (2.0 * PI * t / period).sin())
5184            .collect();
5185
5186        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5187
5188        let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.3);
5189
5190        eprintln!(
5191            "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5192            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5193        );
5194
5195        assert!(result.is_seasonal, "Should detect seasonality");
5196        assert!(
5197            !result.has_modulation,
5198            "Constant amplitude should not have modulation, got score={:.4}",
5199            result.modulation_score
5200        );
5201    }
5202
5203    #[test]
5204    fn test_wavelet_amplitude_modulation_emerging() {
5205        let m = 200;
5206        let period = 0.2;
5207        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5208
5209        // Amplitude grows from 0.2 to 1.0
5210        let data: Vec<f64> = argvals
5211            .iter()
5212            .map(|&t| {
5213                let amplitude = 0.2 + 0.8 * t;
5214                amplitude * (2.0 * PI * t / period).sin()
5215            })
5216            .collect();
5217
5218        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5219
5220        let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.2);
5221
5222        eprintln!(
5223            "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5224            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5225        );
5226
5227        assert!(result.is_seasonal, "Should detect seasonality");
5228        assert!(
5229            result.has_modulation,
5230            "Growing amplitude should have modulation"
5231        );
5232        assert!(
5233            result.amplitude_trend > 0.0,
5234            "Trend should be positive for emerging"
5235        );
5236    }
5237
5238    #[test]
5239    fn test_wavelet_amplitude_modulation_fading() {
5240        let m = 200;
5241        let period = 0.2;
5242        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5243
5244        // Amplitude decreases from 1.0 to 0.2
5245        let data: Vec<f64> = argvals
5246            .iter()
5247            .map(|&t| {
5248                let amplitude = 1.0 - 0.8 * t;
5249                amplitude * (2.0 * PI * t / period).sin()
5250            })
5251            .collect();
5252
5253        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
5254
5255        let result = detect_amplitude_modulation_wavelet(&data, &argvals, period, 0.15, 0.2);
5256
5257        eprintln!(
5258            "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5259            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5260        );
5261
5262        assert!(result.is_seasonal, "Should detect seasonality");
5263        assert!(
5264            result.has_modulation,
5265            "Fading amplitude should have modulation"
5266        );
5267        assert!(
5268            result.amplitude_trend < 0.0,
5269            "Trend should be negative for fading"
5270        );
5271    }
5272
5273    #[test]
5274    fn test_seasonal_strength_wavelet() {
5275        let m = 200;
5276        let period = 0.2;
5277        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5278
5279        // Pure sine wave at target period - should have high strength
5280        let seasonal_data: Vec<f64> = argvals
5281            .iter()
5282            .map(|&t| (2.0 * PI * t / period).sin())
5283            .collect();
5284
5285        let seasonal_data = FdMatrix::from_column_major(seasonal_data, 1, m).unwrap();
5286
5287        let strength = seasonal_strength_wavelet(&seasonal_data, &argvals, period);
5288        eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5289        assert!(
5290            strength > 0.5,
5291            "Pure sine should have high wavelet strength"
5292        );
5293
5294        // Pure noise - should have low strength
5295        let noise_data: Vec<f64> = (0..m)
5296            .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5297            .collect();
5298
5299        let noise_data = FdMatrix::from_column_major(noise_data, 1, m).unwrap();
5300
5301        let noise_strength = seasonal_strength_wavelet(&noise_data, &argvals, period);
5302        eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5303        assert!(
5304            noise_strength < 0.3,
5305            "Noise should have low wavelet strength"
5306        );
5307
5308        // Wrong period - should have lower strength
5309        let wrong_period_strength =
5310            seasonal_strength_wavelet(&seasonal_data, &argvals, period * 2.0);
5311        eprintln!(
5312            "Wavelet strength (wrong period): {:.4}",
5313            wrong_period_strength
5314        );
5315        assert!(
5316            wrong_period_strength < strength,
5317            "Wrong period should have lower strength"
5318        );
5319    }
5320
5321    #[test]
5322    fn test_compute_mean_curve() {
5323        // 2 samples, 3 time points
5324        // Sample 1: [1, 2, 3]
5325        // Sample 2: [2, 4, 6]
5326        // Mean: [1.5, 3, 4.5]
5327        let data = FdMatrix::from_column_major(vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0], 2, 3).unwrap();
5328        let mean = compute_mean_curve(&data);
5329        assert_eq!(mean.len(), 3);
5330        assert!((mean[0] - 1.5).abs() < 1e-10);
5331        assert!((mean[1] - 3.0).abs() < 1e-10);
5332        assert!((mean[2] - 4.5).abs() < 1e-10);
5333    }
5334
5335    #[test]
5336    fn test_compute_mean_curve_parallel_consistency() {
5337        // Test that parallel and sequential give same results
5338        let n = 10;
5339        let m = 200;
5340        let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5341
5342        let data = FdMatrix::from_column_major(data, n, m).unwrap();
5343
5344        let seq_result = compute_mean_curve_impl(&data, false);
5345        let par_result = compute_mean_curve_impl(&data, true);
5346
5347        assert_eq!(seq_result.len(), par_result.len());
5348        for (s, p) in seq_result.iter().zip(par_result.iter()) {
5349            assert!(
5350                (s - p).abs() < 1e-10,
5351                "Sequential and parallel results differ"
5352            );
5353        }
5354    }
5355
5356    #[test]
5357    fn test_interior_bounds() {
5358        // m = 100: edge_skip = 10, interior = [10, 90)
5359        let bounds = interior_bounds(100);
5360        assert!(bounds.is_some());
5361        let (start, end) = bounds.unwrap();
5362        assert_eq!(start, 10);
5363        assert_eq!(end, 90);
5364
5365        // m = 10: edge_skip = 1, but min(1, 2) = 1, max(9, 7) = 9
5366        let bounds = interior_bounds(10);
5367        assert!(bounds.is_some());
5368        let (start, end) = bounds.unwrap();
5369        assert!(start < end);
5370
5371        // Very small m might not have valid interior
5372        let bounds = interior_bounds(2);
5373        // Should still return something as long as end > start
5374        assert!(bounds.is_some() || bounds.is_none());
5375    }
5376
    #[test]
    fn test_hilbert_transform_pure_sine() {
        // The analytic signal of a pure sine has unit modulus everywhere:
        // for x(t) = sin(t), the Hilbert transform is H[x](t) = -cos(t),
        // so |x + i*H[x]| = 1. (Only the amplitude envelope is checked
        // here, so the sign of the quadrature component does not matter.)
        let m = 200;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
        let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();

        let analytic = hilbert_transform(&signal);
        assert_eq!(analytic.len(), m);

        // Check amplitude is approximately 1, skipping 10 points at each
        // end where the FFT-based transform has boundary effects.
        for c in analytic.iter().skip(10).take(m - 20) {
            let amp = c.norm();
            assert!(
                (amp - 1.0).abs() < 0.1,
                "Amplitude should be ~1, got {}",
                amp
            );
        }
    }
5397
5398    #[test]
5399    fn test_sazed_pure_sine() {
5400        // Pure sine wave with known period
5401        let m = 200;
5402        let period = 2.0;
5403        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5404        let data: Vec<f64> = argvals
5405            .iter()
5406            .map(|&t| (2.0 * PI * t / period).sin())
5407            .collect();
5408
5409        let result = sazed(&data, &argvals, None);
5410
5411        assert!(result.period.is_finite(), "SAZED should detect a period");
5412        assert!(
5413            (result.period - period).abs() < 0.3,
5414            "Expected period ~{}, got {}",
5415            period,
5416            result.period
5417        );
5418        assert!(
5419            result.confidence > 0.4,
5420            "Expected confidence > 0.4, got {}",
5421            result.confidence
5422        );
5423        assert!(
5424            result.agreeing_components >= 2,
5425            "Expected at least 2 agreeing components, got {}",
5426            result.agreeing_components
5427        );
5428    }
5429
5430    #[test]
5431    fn test_sazed_noisy_sine() {
5432        // Sine wave with noise
5433        let m = 300;
5434        let period = 3.0;
5435        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5436
5437        // Deterministic pseudo-noise using sin with different frequency
5438        let data: Vec<f64> = argvals
5439            .iter()
5440            .enumerate()
5441            .map(|(i, &t)| {
5442                let signal = (2.0 * PI * t / period).sin();
5443                let noise = 0.1 * (17.3 * i as f64).sin();
5444                signal + noise
5445            })
5446            .collect();
5447
5448        let result = sazed(&data, &argvals, Some(0.15));
5449
5450        assert!(
5451            result.period.is_finite(),
5452            "SAZED should detect a period even with noise"
5453        );
5454        assert!(
5455            (result.period - period).abs() < 0.5,
5456            "Expected period ~{}, got {}",
5457            period,
5458            result.period
5459        );
5460    }
5461
5462    #[test]
5463    fn test_sazed_fdata() {
5464        // Multiple samples with same period
5465        let n = 5;
5466        let m = 200;
5467        let period = 2.0;
5468        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5469        let data = generate_sine(n, m, period, &argvals);
5470
5471        let result = sazed_fdata(&data, &argvals, None);
5472
5473        assert!(result.period.is_finite(), "SAZED should detect a period");
5474        assert!(
5475            (result.period - period).abs() < 0.3,
5476            "Expected period ~{}, got {}",
5477            period,
5478            result.period
5479        );
5480    }
5481
5482    #[test]
5483    fn test_sazed_short_series() {
5484        // Very short series - should return NaN gracefully
5485        let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5486        let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5487
5488        let result = sazed(&data, &argvals, None);
5489
5490        // Should handle gracefully (return NaN for too-short series)
5491        assert!(
5492            result.period.is_nan() || result.period.is_finite(),
5493            "Should return NaN or valid period"
5494        );
5495    }
5496
5497    #[test]
5498    fn test_autoperiod_pure_sine() {
5499        // Pure sine wave with known period
5500        let m = 200;
5501        let period = 2.0;
5502        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5503        let data: Vec<f64> = argvals
5504            .iter()
5505            .map(|&t| (2.0 * PI * t / period).sin())
5506            .collect();
5507
5508        let result = autoperiod(&data, &argvals, None, None);
5509
5510        assert!(
5511            result.period.is_finite(),
5512            "Autoperiod should detect a period"
5513        );
5514        assert!(
5515            (result.period - period).abs() < 0.3,
5516            "Expected period ~{}, got {}",
5517            period,
5518            result.period
5519        );
5520        assert!(
5521            result.confidence > 0.0,
5522            "Expected positive confidence, got {}",
5523            result.confidence
5524        );
5525    }
5526
5527    #[test]
5528    fn test_autoperiod_with_trend() {
5529        // Sine wave with linear trend
5530        let m = 300;
5531        let period = 3.0;
5532        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5533        let data: Vec<f64> = argvals
5534            .iter()
5535            .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5536            .collect();
5537
5538        let result = autoperiod(&data, &argvals, None, None);
5539
5540        assert!(
5541            result.period.is_finite(),
5542            "Autoperiod should detect a period"
5543        );
5544        // Allow more tolerance with trend
5545        assert!(
5546            (result.period - period).abs() < 0.5,
5547            "Expected period ~{}, got {}",
5548            period,
5549            result.period
5550        );
5551    }
5552
5553    #[test]
5554    fn test_autoperiod_candidates() {
5555        // Verify candidates are generated
5556        let m = 200;
5557        let period = 2.0;
5558        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5559        let data: Vec<f64> = argvals
5560            .iter()
5561            .map(|&t| (2.0 * PI * t / period).sin())
5562            .collect();
5563
5564        let result = autoperiod(&data, &argvals, Some(5), Some(10));
5565
5566        assert!(
5567            !result.candidates.is_empty(),
5568            "Should have at least one candidate"
5569        );
5570
5571        // Best candidate should have highest combined score
5572        let max_score = result
5573            .candidates
5574            .iter()
5575            .map(|c| c.combined_score)
5576            .fold(f64::NEG_INFINITY, f64::max);
5577        assert!(
5578            (result.confidence - max_score).abs() < 1e-10,
5579            "Returned confidence should match best candidate's score"
5580        );
5581    }
5582
5583    #[test]
5584    fn test_autoperiod_fdata() {
5585        // Multiple samples with same period
5586        let n = 5;
5587        let m = 200;
5588        let period = 2.0;
5589        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5590        let data = generate_sine(n, m, period, &argvals);
5591
5592        let result = autoperiod_fdata(&data, &argvals, None, None);
5593
5594        assert!(
5595            result.period.is_finite(),
5596            "Autoperiod should detect a period"
5597        );
5598        assert!(
5599            (result.period - period).abs() < 0.3,
5600            "Expected period ~{}, got {}",
5601            period,
5602            result.period
5603        );
5604    }
5605
5606    #[test]
5607    fn test_cfd_autoperiod_pure_sine() {
5608        // Pure sine wave with known period
5609        let m = 200;
5610        let period = 2.0;
5611        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5612        let data: Vec<f64> = argvals
5613            .iter()
5614            .map(|&t| (2.0 * PI * t / period).sin())
5615            .collect();
5616
5617        let result = cfd_autoperiod(&data, &argvals, None, None);
5618
5619        assert!(
5620            result.period.is_finite(),
5621            "CFDAutoperiod should detect a period"
5622        );
5623        assert!(
5624            (result.period - period).abs() < 0.3,
5625            "Expected period ~{}, got {}",
5626            period,
5627            result.period
5628        );
5629    }
5630
5631    #[test]
5632    fn test_cfd_autoperiod_with_trend() {
5633        // Sine wave with strong linear trend - CFD excels here
5634        let m = 300;
5635        let period = 3.0;
5636        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5637        let data: Vec<f64> = argvals
5638            .iter()
5639            .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5640            .collect();
5641
5642        let result = cfd_autoperiod(&data, &argvals, None, None);
5643
5644        assert!(
5645            result.period.is_finite(),
5646            "CFDAutoperiod should detect a period despite trend"
5647        );
5648        // Allow more tolerance since trend can affect detection
5649        assert!(
5650            (result.period - period).abs() < 0.6,
5651            "Expected period ~{}, got {}",
5652            period,
5653            result.period
5654        );
5655    }
5656
5657    #[test]
5658    fn test_cfd_autoperiod_multiple_periods() {
5659        // Signal with two periods - should detect multiple
5660        let m = 400;
5661        let period1 = 2.0;
5662        let period2 = 5.0;
5663        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5664        let data: Vec<f64> = argvals
5665            .iter()
5666            .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5667            .collect();
5668
5669        let result = cfd_autoperiod(&data, &argvals, None, None);
5670
5671        assert!(
5672            !result.periods.is_empty(),
5673            "Should detect at least one period"
5674        );
5675        // The primary period should be one of the two
5676        let close_to_p1 = (result.period - period1).abs() < 0.5;
5677        let close_to_p2 = (result.period - period2).abs() < 1.0;
5678        assert!(
5679            close_to_p1 || close_to_p2,
5680            "Primary period {} not close to {} or {}",
5681            result.period,
5682            period1,
5683            period2
5684        );
5685    }
5686
5687    #[test]
5688    fn test_cfd_autoperiod_fdata() {
5689        // Multiple samples with same period
5690        let n = 5;
5691        let m = 200;
5692        let period = 2.0;
5693        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5694        let data = generate_sine(n, m, period, &argvals);
5695
5696        let result = cfd_autoperiod_fdata(&data, &argvals, None, None);
5697
5698        assert!(
5699            result.period.is_finite(),
5700            "CFDAutoperiod should detect a period"
5701        );
5702        assert!(
5703            (result.period - period).abs() < 0.3,
5704            "Expected period ~{}, got {}",
5705            period,
5706            result.period
5707        );
5708    }
5709
5710    // ========================================================================
5711    // SSA Tests
5712    // ========================================================================
5713
5714    #[test]
5715    fn test_ssa_pure_sine() {
5716        let n = 200;
5717        let period = 12.0;
5718        let values: Vec<f64> = (0..n)
5719            .map(|i| {
5720                let t = i as f64;
5721                0.01 * t + (2.0 * PI * t / period).sin() + 0.05 * ((i * 7) as f64 * 0.1).sin()
5722            })
5723            .collect();
5724
5725        let result = ssa(&values, None, None, None, None);
5726
5727        // trend + seasonal + noise ≈ original
5728        for i in 0..n {
5729            let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5730            assert!(
5731                (reconstructed - values[i]).abs() < 1e-8,
5732                "SSA reconstruction error at {}: {} vs {}",
5733                i,
5734                reconstructed,
5735                values[i]
5736            );
5737        }
5738
5739        // Singular values should be descending
5740        for i in 1..result.singular_values.len() {
5741            assert!(
5742                result.singular_values[i] <= result.singular_values[i - 1] + 1e-10,
5743                "Singular values should be descending: {} > {}",
5744                result.singular_values[i],
5745                result.singular_values[i - 1]
5746            );
5747        }
5748
5749        // Contributions should sum to <= 1
5750        let total_contrib: f64 = result.contributions.iter().sum();
5751        assert!(
5752            total_contrib <= 1.0 + 1e-10,
5753            "Contributions should sum to <= 1, got {}",
5754            total_contrib
5755        );
5756    }
5757
5758    #[test]
5759    fn test_ssa_explicit_groupings() {
5760        let n = 100;
5761        let period = 10.0;
5762        let values: Vec<f64> = (0..n)
5763            .map(|i| 0.01 * i as f64 + (2.0 * PI * i as f64 / period).sin())
5764            .collect();
5765
5766        let trend_components = [0usize];
5767        let seasonal_components = [1usize, 2];
5768
5769        let result = ssa(
5770            &values,
5771            None,
5772            None,
5773            Some(&trend_components),
5774            Some(&seasonal_components),
5775        );
5776
5777        assert_eq!(result.trend.len(), n);
5778        assert_eq!(result.seasonal.len(), n);
5779        assert_eq!(result.noise.len(), n);
5780
5781        // Reconstruction should still hold
5782        for i in 0..n {
5783            let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5784            assert!(
5785                (reconstructed - values[i]).abs() < 1e-8,
5786                "SSA explicit grouping reconstruction error at {}",
5787                i
5788            );
5789        }
5790    }
5791
    #[test]
    fn test_ssa_short_series() {
        // n < 4 should trigger the early-return path: the input passes
        // through unchanged as the "trend", the seasonal and noise parts
        // are all-zero, and no components are extracted.
        let values = vec![1.0, 2.0, 3.0];
        let result = ssa(&values, None, None, None, None);

        assert_eq!(result.trend, values);
        assert_eq!(result.seasonal, vec![0.0; 3]);
        assert_eq!(result.noise, vec![0.0; 3]);
        assert_eq!(result.n_components, 0);
    }
5803
5804    #[test]
5805    fn test_ssa_fdata() {
5806        let n = 5;
5807        let m = 100;
5808        let mut data = vec![0.0; n * m];
5809        for i in 0..n {
5810            let amp = (i + 1) as f64;
5811            for j in 0..m {
5812                data[i + j * n] = amp * (2.0 * PI * j as f64 / 12.0).sin() + 0.01 * j as f64;
5813            }
5814        }
5815
5816        let data = FdMatrix::from_column_major(data, n, m).unwrap();
5817
5818        let result = ssa_fdata(&data, None, None);
5819
5820        assert_eq!(result.trend.len(), m);
5821        assert_eq!(result.seasonal.len(), m);
5822        assert_eq!(result.noise.len(), m);
5823        assert!(!result.singular_values.is_empty());
5824    }
5825
    #[test]
    fn test_ssa_seasonality() {
        // A clean sinusoid should be assessed with non-negative
        // confidence; a tiny deterministic series serves as the contrast.
        let n = 200;
        let period = 12.0;
        let seasonal_values: Vec<f64> = (0..n)
            .map(|i| (2.0 * PI * i as f64 / period).sin())
            .collect();

        let (is_seasonal, _det_period, confidence) =
            ssa_seasonality(&seasonal_values, None, Some(0.05));

        // A pure sine should be detected as seasonal, but the boolean
        // verdict depends on component grouping, so only the confidence
        // invariant is asserted here.
        assert!(confidence >= 0.0, "Confidence should be non-negative");

        // Low-amplitude pseudo-noise generated deterministically (no RNG).
        let noise_values: Vec<f64> = (0..n)
            .map(|i| ((i * 13 + 7) as f64 * 0.1).sin() * 0.01)
            .collect();

        let (is_noise_seasonal, _, noise_conf) = ssa_seasonality(&noise_values, None, Some(0.5));

        // Noise should have low confidence, but auto-grouping makes a
        // strict `false` verdict non-deterministic across implementations;
        // these results are intentionally only smoke-checked (no panic).
        let _ = (is_seasonal, is_noise_seasonal, noise_conf);
    }
5853
5854    // ========================================================================
5855    // Matrix Profile Tests
5856    // ========================================================================
5857
5858    #[test]
5859    fn test_matrix_profile_periodic() {
5860        let period = 20;
5861        let n = period * 10;
5862        let values: Vec<f64> = (0..n)
5863            .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5864            .collect();
5865
5866        let result = matrix_profile(&values, Some(15), None);
5867
5868        assert_eq!(result.profile.len(), n - 15 + 1);
5869        assert_eq!(result.profile_index.len(), n - 15 + 1);
5870        assert_eq!(result.subsequence_length, 15);
5871
5872        // Profile should be finite
5873        for &p in &result.profile {
5874            assert!(p.is_finite(), "Profile values should be finite");
5875        }
5876
5877        // Primary period should be close to 20
5878        assert!(
5879            (result.primary_period - period as f64).abs() < 5.0,
5880            "Expected primary_period ≈ {}, got {}",
5881            period,
5882            result.primary_period
5883        );
5884    }
5885
5886    #[test]
5887    fn test_matrix_profile_non_periodic() {
5888        // Linear ramp (non-periodic)
5889        let n = 200;
5890        let values: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5891
5892        let result = matrix_profile(&values, Some(10), None);
5893
5894        assert_eq!(result.profile.len(), n - 10 + 1);
5895
5896        // Should have lower confidence than periodic
5897        // (not always strictly 0, depends on ramp structure)
5898        assert!(result.confidence <= 1.0, "Confidence should be <= 1.0");
5899    }
5900
5901    #[test]
5902    fn test_matrix_profile_fdata() {
5903        let n = 3;
5904        let m = 200;
5905        let period = 20.0;
5906        let mut data = vec![0.0; n * m];
5907        for i in 0..n {
5908            let amp = (i + 1) as f64;
5909            for j in 0..m {
5910                data[i + j * n] = amp * (2.0 * PI * j as f64 / period).sin();
5911            }
5912        }
5913
5914        let data = FdMatrix::from_column_major(data, n, m).unwrap();
5915
5916        let result = matrix_profile_fdata(&data, Some(15), None);
5917
5918        assert!(!result.profile.is_empty());
5919        assert!(result.profile_index.len() == result.profile.len());
5920    }
5921
5922    #[test]
5923    fn test_matrix_profile_seasonality() {
5924        let period = 20;
5925        let n = period * 10;
5926        // Periodic signal
5927        let values: Vec<f64> = (0..n)
5928            .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5929            .collect();
5930
5931        let (is_seasonal, det_period, confidence) =
5932            matrix_profile_seasonality(&values, Some(15), Some(0.05));
5933
5934        assert!(
5935            is_seasonal,
5936            "Periodic signal should be detected as seasonal"
5937        );
5938        assert!(det_period > 0.0, "Detected period should be positive");
5939        assert!(confidence >= 0.05, "Confidence should be above threshold");
5940
5941        // Weak / non-periodic signal
5942        let weak_values: Vec<f64> = (0..n).map(|i| i as f64 * 0.001).collect();
5943        let (is_weak_seasonal, _, _) =
5944            matrix_profile_seasonality(&weak_values, Some(15), Some(0.5));
5945        let _ = is_weak_seasonal; // May or may not detect - we just check it doesn't panic
5946    }
5947
5948    // ========================================================================
5949    // Lomb-Scargle Tests
5950    // ========================================================================
5951
5952    #[test]
5953    fn test_lomb_scargle_regular() {
5954        let n = 200;
5955        let true_period = 5.0;
5956        let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5957        let values: Vec<f64> = times
5958            .iter()
5959            .map(|&t| (2.0 * PI * t / true_period).sin())
5960            .collect();
5961
5962        let result = lomb_scargle(&times, &values, None, None, None);
5963
5964        assert!(
5965            (result.peak_period - true_period).abs() < 0.5,
5966            "Expected peak_period ≈ {}, got {}",
5967            true_period,
5968            result.peak_period
5969        );
5970        assert!(
5971            result.false_alarm_probability < 0.05,
5972            "FAP should be low for strong signal: {}",
5973            result.false_alarm_probability
5974        );
5975        assert!(result.peak_power > 0.0, "Peak power should be positive");
5976        assert!(!result.frequencies.is_empty());
5977        assert_eq!(result.frequencies.len(), result.power.len());
5978        assert_eq!(result.frequencies.len(), result.periods.len());
5979    }
5980
5981    #[test]
5982    fn test_lomb_scargle_irregular() {
5983        let true_period = 5.0;
5984        // Irregularly sampled: take a subset of regular samples
5985        let all_times: Vec<f64> = (0..300).map(|i| i as f64 * 0.1).collect();
5986        // Take every other point with some jitter-like selection
5987        let times: Vec<f64> = all_times
5988            .iter()
5989            .enumerate()
5990            .filter(|(i, _)| i % 2 == 0 || i % 3 == 0)
5991            .map(|(_, &t)| t)
5992            .collect();
5993        let values: Vec<f64> = times
5994            .iter()
5995            .map(|&t| (2.0 * PI * t / true_period).sin())
5996            .collect();
5997
5998        let result = lomb_scargle(&times, &values, None, None, None);
5999
6000        assert!(
6001            (result.peak_period - true_period).abs() < 1.0,
6002            "Irregular LS: expected period ≈ {}, got {}",
6003            true_period,
6004            result.peak_period
6005        );
6006    }
6007
6008    #[test]
6009    fn test_lomb_scargle_custom_frequencies() {
6010        let n = 100;
6011        let true_period = 5.0;
6012        let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
6013        let values: Vec<f64> = times
6014            .iter()
6015            .map(|&t| (2.0 * PI * t / true_period).sin())
6016            .collect();
6017
6018        // Explicit frequency grid
6019        let frequencies: Vec<f64> = (1..50).map(|i| i as f64 * 0.01).collect();
6020        let result = lomb_scargle(&times, &values, Some(&frequencies), None, None);
6021
6022        assert_eq!(result.frequencies.len(), frequencies.len());
6023        assert_eq!(result.power.len(), frequencies.len());
6024        assert!(result.peak_power > 0.0);
6025    }
6026
6027    #[test]
6028    fn test_lomb_scargle_fdata() {
6029        let n = 5;
6030        let m = 200;
6031        let period = 5.0;
6032        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6033        let data = generate_sine(n, m, period, &argvals);
6034
6035        let result = lomb_scargle_fdata(&data, &argvals, None, None);
6036
6037        assert!(
6038            (result.peak_period - period).abs() < 0.5,
6039            "Fdata LS: expected period ≈ {}, got {}",
6040            period,
6041            result.peak_period
6042        );
6043        assert!(!result.frequencies.is_empty());
6044    }
6045
6046    // ========================================================================
6047    // Seasonality change detection tests
6048    // ========================================================================
6049
6050    #[test]
6051    fn test_detect_seasonality_changes_onset() {
6052        let m = 400;
6053        let period = 2.0;
6054        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); // 0..20
6055
6056        // First half: noise-like (low seasonality), second half: strong sine
6057        let data: Vec<f64> = argvals
6058            .iter()
6059            .map(|&t| {
6060                if t < 10.0 {
6061                    // Weak signal
6062                    0.05 * ((t * 13.0).sin() + (t * 7.0).cos())
6063                } else {
6064                    // Strong seasonal
6065                    (2.0 * PI * t / period).sin()
6066                }
6067            })
6068            .collect();
6069        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
6070
6071        let result = detect_seasonality_changes(&data, &argvals, period, 0.3, 4.0, 2.0);
6072
6073        assert!(
6074            !result.strength_curve.is_empty(),
6075            "Strength curve should not be empty"
6076        );
6077        assert_eq!(result.strength_curve.len(), m);
6078
6079        // Should detect at least one change point (onset around t=10)
6080        if !result.change_points.is_empty() {
6081            let onset_points: Vec<_> = result
6082                .change_points
6083                .iter()
6084                .filter(|cp| cp.change_type == ChangeType::Onset)
6085                .collect();
6086            // At least one onset should exist near the transition
6087            assert!(!onset_points.is_empty(), "Should detect Onset change point");
6088        }
6089    }
6090
6091    #[test]
6092    fn test_detect_seasonality_changes_no_change() {
6093        let m = 400;
6094        let period = 2.0;
6095        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
6096
6097        // Consistently strong seasonal signal
6098        let data: Vec<f64> = argvals
6099            .iter()
6100            .map(|&t| (2.0 * PI * t / period).sin())
6101            .collect();
6102        let data = FdMatrix::from_column_major(data, 1, m).unwrap();
6103
6104        let result = detect_seasonality_changes(&data, &argvals, period, 0.3, 4.0, 2.0);
6105
6106        assert!(!result.strength_curve.is_empty());
6107        // With consistently seasonal data, there should be no Cessation points
6108        let cessation_points: Vec<_> = result
6109            .change_points
6110            .iter()
6111            .filter(|cp| cp.change_type == ChangeType::Cessation)
6112            .collect();
6113        assert!(
6114            cessation_points.is_empty(),
6115            "Consistently seasonal signal should have no Cessation points, found {}",
6116            cessation_points.len()
6117        );
6118    }
6119
6120    // ========================================================================
6121    // Period estimation tests (ACF and regression)
6122    // ========================================================================
6123
6124    #[test]
6125    fn test_estimate_period_acf() {
6126        let m = 200;
6127        let period = 2.0;
6128        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6129        let data = generate_sine(1, m, period, &argvals);
6130
6131        let estimate = estimate_period_acf(data.as_slice(), 1, m, &argvals, m / 2);
6132
6133        assert!(
6134            estimate.period.is_finite(),
6135            "ACF period estimate should be finite"
6136        );
6137        assert!(
6138            (estimate.period - period).abs() < 0.5,
6139            "ACF expected period ≈ {}, got {}",
6140            period,
6141            estimate.period
6142        );
6143        assert!(
6144            estimate.confidence > 0.0,
6145            "ACF confidence should be positive"
6146        );
6147    }
6148
6149    #[test]
6150    fn test_estimate_period_regression() {
6151        let m = 200;
6152        let period = 2.0;
6153        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6154        let data = generate_sine(1, m, period, &argvals);
6155
6156        let estimate =
6157            estimate_period_regression(data.as_slice(), 1, m, &argvals, 1.5, 3.0, 100, 3);
6158
6159        assert!(
6160            estimate.period.is_finite(),
6161            "Regression period estimate should be finite"
6162        );
6163        assert!(
6164            (estimate.period - period).abs() < 0.5,
6165            "Regression expected period ≈ {}, got {}",
6166            period,
6167            estimate.period
6168        );
6169        assert!(
6170            estimate.confidence > 0.0,
6171            "Regression confidence should be positive"
6172        );
6173    }
6174
6175    // ========================================================================
6176    // Seasonal strength windowed test
6177    // ========================================================================
6178
6179    #[test]
6180    fn test_seasonal_strength_windowed_variance() {
6181        let m = 200;
6182        let period = 2.0;
6183        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6184        let data = generate_sine(1, m, period, &argvals);
6185
6186        let strengths = seasonal_strength_windowed(
6187            &data,
6188            &argvals,
6189            period,
6190            4.0, // window_size
6191            StrengthMethod::Variance,
6192        );
6193
6194        assert_eq!(strengths.len(), m, "Should return m values");
6195
6196        // Interior values (away from edges) should be in [0,1]
6197        let interior_start = m / 4;
6198        let interior_end = 3 * m / 4;
6199        for i in interior_start..interior_end {
6200            let s = strengths[i];
6201            if s.is_finite() {
6202                assert!(
6203                    (-0.01..=1.01).contains(&s),
6204                    "Windowed strength at {} should be near [0,1], got {}",
6205                    i,
6206                    s
6207                );
6208            }
6209        }
6210    }
6211}