Skip to main content

fdars_core/
seasonal.rs

1//! Seasonal time series analysis for functional data.
2//!
3//! This module provides functions for analyzing seasonal patterns in functional data:
4//! - Period estimation (FFT, autocorrelation, regression-based)
5//! - Peak detection with prominence calculation
6//! - Seasonal strength measurement (variance and spectral methods)
7//! - Seasonality change detection (onset/cessation)
8//! - Instantaneous period estimation for drifting seasonality
9//! - Peak timing variability analysis for short series
10//! - Seasonality classification
11
12use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::{iter_maybe_parallel, slice_maybe_parallel};
15use num_complex::Complex;
16#[cfg(feature = "parallel")]
17use rayon::iter::ParallelIterator;
18use rustfft::FftPlanner;
19use std::f64::consts::PI;
20
/// Result of period estimation.
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period, in the same time units as the input grid
    pub period: f64,
    /// Dominant frequency (1/period), in cycles per unit time
    pub frequency: f64,
    /// Spectral power at the dominant frequency
    pub power: f64,
    /// Confidence measure (ratio of peak power to mean power; larger = clearer peak)
    pub confidence: f64,
}
33
/// A detected peak in functional data.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Time at which the peak occurs
    pub time: f64,
    /// Signal value at the peak
    pub value: f64,
    /// Prominence of the peak (height above the higher of the two surrounding valleys)
    pub prominence: f64,
}
44
/// Result of peak detection.
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Peaks for each sample: `peaks[sample_idx]` contains the peaks of that sample
    pub peaks: Vec<Vec<Peak>>,
    /// Inter-peak distances (time between consecutive peaks) for each sample
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean period estimated from inter-peak distances pooled across all samples
    pub mean_period: f64,
}
55
/// A detected period from multiple period detection.
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Estimated period
    pub period: f64,
    /// FFT confidence (ratio of peak power to mean power)
    pub confidence: f64,
    /// Seasonal strength at this period (fraction of variance explained)
    pub strength: f64,
    /// Amplitude of the fitted sinusoidal component
    pub amplitude: f64,
    /// Phase of the fitted sinusoidal component (radians)
    pub phase: f64,
    /// Iteration number (1-indexed) at which this period was extracted
    // NOTE(review): presumably the iteration of a detect-and-subtract loop — confirm at call site.
    pub iteration: usize,
}
72
/// Method for computing seasonal strength.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Variance decomposition: Var(seasonal) / Var(total)
    Variance,
    /// Spectral: power at the seasonal frequency and its harmonics / total power
    Spectral,
}
81
/// A detected change point in seasonality.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time at which the change occurs
    pub time: f64,
    /// Type of change (onset or cessation of seasonality)
    pub change_type: ChangeType,
    /// Seasonal strength immediately before the change
    pub strength_before: f64,
    /// Seasonal strength immediately after the change
    pub strength_after: f64,
}
94
/// Type of seasonality change.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Series becomes seasonal (strength rises above the threshold)
    Onset,
    /// Series stops being seasonal (strength falls below the threshold)
    Cessation,
}
103
/// Result of seasonality change detection.
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Detected change points, in time order
    pub change_points: Vec<ChangePoint>,
    /// Time-varying seasonal strength curve used for detection (may contain NaN)
    pub strength_curve: Vec<f64>,
}
112
/// Result of instantaneous period estimation.
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each time point
    pub period: Vec<f64>,
    /// Instantaneous frequency at each time point (1/period)
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (analytic-signal envelope) at each time point
    pub amplitude: Vec<f64>,
}
123
/// Result of peak timing variability analysis.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Peak time for each detected cycle
    pub peak_times: Vec<f64>,
    /// Signal value at each peak
    pub peak_values: Vec<f64>,
    /// Within-period timing on a 0-1 scale (e.g. day-of-year / 365)
    pub normalized_timing: Vec<f64>,
    /// Mean of `normalized_timing`
    pub mean_timing: f64,
    /// Standard deviation of `normalized_timing`
    pub std_timing: f64,
    /// Range of `normalized_timing` (max - min)
    pub range_timing: f64,
    /// Variability score (0 = stable, 1 = highly variable)
    pub variability_score: f64,
    /// Trend in timing (positive = peaks drifting later across cycles)
    pub timing_trend: f64,
    /// Cycle indices (1-indexed) the peaks belong to
    pub cycle_indices: Vec<usize>,
}
146
/// Type of seasonality pattern.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Regular peaks with consistent timing across cycles
    StableSeasonal,
    /// Regular peaks, but timing shifts between cycles
    VariableTiming,
    /// Some cycles are seasonal, some are not
    IntermittentSeasonal,
    /// No clear seasonality
    NonSeasonal,
}
159
/// Result of seasonality classification.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Whether the series is seasonal overall
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles
    pub has_stable_timing: bool,
    /// Timing variability score (0 = stable, 1 = highly variable)
    pub timing_variability: f64,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Per-cycle seasonal strength (NaN for cycles with too few points)
    pub cycle_strengths: Vec<f64>,
    /// Indices of weak/missing seasons (0-indexed cycle numbers)
    pub weak_seasons: Vec<usize>,
    /// Classification type
    pub classification: SeasonalType,
    /// Peak timing analysis (`None` if no peaks were detected)
    pub peak_timing: Option<PeakTimingResult>,
}
180
/// Method for automatic threshold selection.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Fixed user-specified threshold
    Fixed(f64),
    /// Percentile of the strength distribution
    // NOTE(review): percentile scale (0-1 vs 0-100) is not visible here — confirm at usage site.
    Percentile(f64),
    /// Otsu's method (optimal bimodal separation of the strength distribution)
    Otsu,
}
191
/// Type of amplitude modulation pattern.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Constant amplitude (no modulation)
    Stable,
    /// Amplitude increases over time (seasonality emerges)
    Emerging,
    /// Amplitude decreases over time (seasonality fades)
    Fading,
    /// Amplitude varies non-monotonically
    Oscillating,
    /// No seasonality detected at all
    NonSeasonal,
}
206
/// Result of amplitude modulation detection.
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether seasonality is present (using the robust spectral method)
    pub is_seasonal: bool,
    /// Overall seasonal strength (spectral method)
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the time-varying strength (0 = stable, higher = more modulation)
    pub modulation_score: f64,
    /// Trend in amplitude, clamped to [-1, 1] (negative = fading, positive = emerging)
    pub amplitude_trend: f64,
    /// Time-varying seasonal strength curve
    pub strength_curve: Vec<f64>,
    /// Time points corresponding to `strength_curve`
    pub time_points: Vec<f64>,
    /// Minimum strength in the curve
    pub min_strength: f64,
    /// Maximum strength in the curve
    pub max_strength: f64,
}
231
/// Result of wavelet-based amplitude modulation detection.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether seasonality is present
    pub is_seasonal: bool,
    /// Overall seasonal strength
    pub seasonal_strength: f64,
    /// Whether amplitude modulation is detected
    pub has_modulation: bool,
    /// Type of amplitude modulation
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the wavelet amplitude envelope
    pub modulation_score: f64,
    /// Trend in amplitude, clamped to [-1, 1]
    pub amplitude_trend: f64,
    /// Wavelet amplitude at the seasonal frequency over time
    pub wavelet_amplitude: Vec<f64>,
    /// Time points corresponding to `wavelet_amplitude`
    pub time_points: Vec<f64>,
    /// Scale (period) used for the wavelet analysis
    pub scale: f64,
}
254
255// ============================================================================
256// Internal helper functions
257// ============================================================================
258
259/// Compute mean curve from column-major data matrix.
260///
261/// # Arguments
262/// * `data` - Column-major matrix (n x m)
263/// * `n` - Number of samples (rows)
264/// * `m` - Number of evaluation points (columns)
265/// * `parallel` - Use parallel iteration (default: true)
266///
267/// # Returns
268/// Mean curve of length m
269#[inline]
270fn compute_mean_curve_impl(data: &[f64], n: usize, m: usize, parallel: bool) -> Vec<f64> {
271    if parallel && m >= 100 {
272        // Use parallel iteration for larger datasets
273        iter_maybe_parallel!(0..m)
274            .map(|j| {
275                let mut sum = 0.0;
276                for i in 0..n {
277                    sum += data[i + j * n];
278                }
279                sum / n as f64
280            })
281            .collect()
282    } else {
283        // Sequential for small datasets or when disabled
284        (0..m)
285            .map(|j| {
286                let mut sum = 0.0;
287                for i in 0..n {
288                    sum += data[i + j * n];
289                }
290                sum / n as f64
291            })
292            .collect()
293    }
294}
295
/// Compute the cross-sample mean curve (parallel by default for m >= 100).
///
/// Convenience wrapper around `compute_mean_curve_impl` with `parallel = true`.
#[inline]
fn compute_mean_curve(data: &[f64], n: usize, m: usize) -> Vec<f64> {
    compute_mean_curve_impl(data, n, m, true)
}
301
/// Compute interior bounds that skip roughly 10% of the points on each side.
///
/// Used to avoid edge effects in wavelet and similar analyses. The skipped
/// margin is capped so that at least the middle half of the range survives.
///
/// # Arguments
/// * `m` - Total number of points
///
/// # Returns
/// `(interior_start, interior_end)` indices, or `None` if the resulting
/// range would be empty.
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let margin = (m as f64 * 0.1) as usize;
    let start = margin.min(m / 4);
    let end = m.saturating_sub(margin).max(m * 3 / 4);
    (end > start).then_some((start, end))
}
323
324/// Validate interior bounds with minimum span requirement.
325fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
326    interior_bounds(m).filter(|&(s, e)| e > s + min_span)
327}
328
329/// Compute periodogram from data using FFT.
330/// Returns (frequencies, power) where frequencies are in cycles per unit time.
331fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
332    let n = data.len();
333    if n < 2 || argvals.len() != n {
334        return (Vec::new(), Vec::new());
335    }
336
337    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
338    let fs = 1.0 / dt; // Sampling frequency
339
340    let mut planner = FftPlanner::<f64>::new();
341    let fft = planner.plan_fft_forward(n);
342
343    let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
344    fft.process(&mut buffer);
345
346    // Compute power spectrum (one-sided)
347    let n_freq = n / 2 + 1;
348    let mut power = Vec::with_capacity(n_freq);
349    let mut frequencies = Vec::with_capacity(n_freq);
350
351    for k in 0..n_freq {
352        let freq = k as f64 * fs / n as f64;
353        frequencies.push(freq);
354
355        let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
356        // Double power for non-DC and non-Nyquist frequencies (one-sided spectrum)
357        let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
358        power.push(p);
359    }
360
361    (frequencies, power)
362}
363
/// Compute the (biased) autocorrelation function of `data` up to `max_lag`.
///
/// Returns `acf[lag]` for `lag = 0..=min(max_lag, n - 1)`, normalized so
/// `acf[0] == 1.0` for non-constant data. A (numerically) constant series
/// returns all ones, since its autocorrelation is undefined.
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    // Clamp once so both the degenerate and normal paths return the same
    // number of lags (the previous code used `max_lag.min(n)` in the
    // constant-series branch, yielding one element too many).
    let max_lag = max_lag.min(n - 1);

    if var < 1e-15 {
        return vec![1.0; max_lag + 1];
    }

    let mut acf = Vec::with_capacity(max_lag + 1);
    for lag in 0..=max_lag {
        // Biased estimator: normalize by n (not n - lag).
        let sum: f64 = data[..n - lag]
            .iter()
            .zip(&data[lag..])
            .map(|(&a, &b)| (a - mean) * (b - mean))
            .sum();
        acf.push(sum / (n as f64 * var));
    }

    acf
}
391
/// Try to append a candidate peak index, enforcing `min_distance` between
/// accepted peaks. A candidate that is too close to the last accepted peak
/// replaces it if (and only if) its signal value is strictly higher.
fn try_add_peak(peaks: &mut Vec<usize>, candidate: usize, signal: &[f64], min_distance: usize) {
    match peaks.last_mut() {
        Some(last) if candidate - *last < min_distance => {
            if signal[candidate] > signal[*last] {
                *last = candidate;
            }
        }
        _ => peaks.push(candidate),
    }
}
400
401/// Find peaks in a 1D signal, returning indices.
402fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
403    let n = signal.len();
404    if n < 3 {
405        return Vec::new();
406    }
407
408    let mut peaks = Vec::new();
409
410    for i in 1..(n - 1) {
411        if signal[i] > signal[i - 1] && signal[i] > signal[i + 1] {
412            try_add_peak(&mut peaks, i, signal, min_distance);
413        }
414    }
415
416    peaks
417}
418
/// Compute the prominence of a peak: its height above the higher of the
/// lowest points found on each side before reaching the boundary or a value
/// at least as high as the peak itself.
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Lowest value walking left until a value >= peak is hit.
    let left_min = signal[..peak_idx]
        .iter()
        .rev()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |low, &v| low.min(v));

    // Lowest value walking right until a value >= peak is hit.
    let right_min = signal[peak_idx + 1..]
        .iter()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |low, &v| low.min(v));

    peak_val - left_min.max(right_min)
}
443
444/// Hilbert transform using FFT to compute analytic signal.
445///
446/// # Arguments
447/// * `signal` - Input real signal
448///
449/// # Returns
450/// Analytic signal as complex vector (real part = original, imaginary = Hilbert transform)
451pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
452    let n = signal.len();
453    if n == 0 {
454        return Vec::new();
455    }
456
457    let mut planner = FftPlanner::<f64>::new();
458    let fft_forward = planner.plan_fft_forward(n);
459    let fft_inverse = planner.plan_fft_inverse(n);
460
461    // Forward FFT
462    let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
463    fft_forward.process(&mut buffer);
464
465    // Create analytic signal in frequency domain
466    // H[0] = 1, H[1..n/2] = 2, H[n/2] = 1 (if n even), H[n/2+1..] = 0
467    let half = n / 2;
468    for k in 1..half {
469        buffer[k] *= 2.0;
470    }
471    for k in (half + 1)..n {
472        buffer[k] = Complex::new(0.0, 0.0);
473    }
474
475    // Inverse FFT
476    fft_inverse.process(&mut buffer);
477
478    // Normalize
479    for c in buffer.iter_mut() {
480        *c /= n as f64;
481    }
482
483    buffer
484}
485
/// Unwrap a phase sequence by removing 2π jumps between consecutive samples.
///
/// Whenever the step between neighbors exceeds π in magnitude, a running
/// multiple of 2π is added to all subsequent samples so the output is
/// continuous.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    let Some(&first) = phase.first() else {
        return Vec::new();
    };

    let mut offset = 0.0;
    let mut unwrapped = Vec::with_capacity(phase.len());
    unwrapped.push(first);

    for pair in phase.windows(2) {
        let step = pair[1] - pair[0];
        // A jump larger than π is treated as a wraparound.
        if step > PI {
            offset -= 2.0 * PI;
        } else if step < -PI {
            offset += 2.0 * PI;
        }
        unwrapped.push(pair[1] + offset);
    }

    unwrapped
}
510
511/// Morlet wavelet function.
512///
513/// The Morlet wavelet is a complex exponential modulated by a Gaussian:
514/// ψ(t) = exp(i * ω₀ * t) * exp(-t² / 2)
515///
516/// where ω₀ is the central frequency (typically 6 for good time-frequency trade-off).
517fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
518    let gaussian = (-t * t / 2.0).exp();
519    let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
520    oscillation * gaussian
521}
522
523/// Continuous Wavelet Transform at a single scale using Morlet wavelet.
524///
525/// Computes the wavelet coefficients at the specified scale (period) for all time points.
526/// Uses convolution in the time domain.
527///
528/// # Arguments
529/// * `signal` - Input signal
530/// * `argvals` - Time points
531/// * `scale` - Scale parameter (related to period)
532/// * `omega0` - Central frequency of Morlet wavelet (default: 6.0)
533///
534/// # Returns
535/// Complex wavelet coefficients at each time point
536#[allow(dead_code)]
537fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
538    let n = signal.len();
539    if n == 0 || scale <= 0.0 {
540        return Vec::new();
541    }
542
543    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
544
545    // Compute wavelet coefficients via convolution
546    // W(a, b) = (1/sqrt(a)) * Σ x[k] * ψ*((t[k] - b) / a) * dt
547    let norm = 1.0 / scale.sqrt();
548
549    (0..n)
550        .map(|b| {
551            let mut sum = Complex::new(0.0, 0.0);
552            for k in 0..n {
553                let t_normalized = (argvals[k] - argvals[b]) / scale;
554                // Only compute within reasonable range (Gaussian decays quickly)
555                if t_normalized.abs() < 6.0 {
556                    let wavelet = morlet_wavelet(t_normalized, omega0);
557                    sum += signal[k] * wavelet.conj();
558                }
559            }
560            sum * norm * dt
561        })
562        .collect()
563}
564
565/// Continuous Wavelet Transform at a single scale using FFT (faster for large signals).
566///
567/// Uses the convolution theorem: CWT = IFFT(FFT(signal) * FFT(wavelet)*)
568fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
569    let n = signal.len();
570    if n == 0 || scale <= 0.0 {
571        return Vec::new();
572    }
573
574    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
575    let norm = 1.0 / scale.sqrt();
576
577    // Compute wavelet in time domain centered at t=0
578    let wavelet_time: Vec<Complex<f64>> = (0..n)
579        .map(|k| {
580            // Center the wavelet
581            let t = if k <= n / 2 {
582                k as f64 * dt / scale
583            } else {
584                (k as f64 - n as f64) * dt / scale
585            };
586            morlet_wavelet(t, omega0) * norm
587        })
588        .collect();
589
590    let mut planner = FftPlanner::<f64>::new();
591    let fft_forward = planner.plan_fft_forward(n);
592    let fft_inverse = planner.plan_fft_inverse(n);
593
594    // FFT of signal
595    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
596    fft_forward.process(&mut signal_fft);
597
598    // FFT of wavelet
599    let mut wavelet_fft = wavelet_time;
600    fft_forward.process(&mut wavelet_fft);
601
602    // Multiply in frequency domain (use conjugate of wavelet FFT for correlation)
603    let mut result: Vec<Complex<f64>> = signal_fft
604        .iter()
605        .zip(wavelet_fft.iter())
606        .map(|(s, w)| *s * w.conj())
607        .collect();
608
609    // Inverse FFT
610    fft_inverse.process(&mut result);
611
612    // Normalize and scale by dt
613    for c in result.iter_mut() {
614        *c *= dt / n as f64;
615    }
616
617    result
618}
619
620/// Compute Otsu's threshold for bimodal separation.
621///
622/// Finds the threshold that minimizes within-class variance (or equivalently
623/// maximizes between-class variance).
624fn otsu_threshold(values: &[f64]) -> f64 {
625    if values.is_empty() {
626        return 0.5;
627    }
628
629    // Filter NaN values
630    let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
631    if valid.is_empty() {
632        return 0.5;
633    }
634
635    let vmin = valid.iter().cloned().fold(f64::INFINITY, f64::min);
636    let vmax = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
637
638    if (vmax - vmin).abs() < 1e-10 {
639        return (vmin + vmax) / 2.0;
640    }
641
642    let n_bins = 256;
643    let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
644    let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
645
646    hist_min + (best_bin as f64 + 0.5) * bin_width
647}
648
/// Slope of the simple OLS regression of `y` on `x`.
///
/// Returns 0.0 for mismatched or too-short inputs, and for (numerically)
/// constant `x`.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let x_bar = x.iter().sum::<f64>() / n;
    let y_bar = y.iter().sum::<f64>() / n;

    // Accumulate covariance numerator and x-variance denominator in one pass.
    let (sxy, sxx) = x.iter().zip(y).fold((0.0, 0.0), |(sxy, sxx), (&xi, &yi)| {
        (sxy + (xi - x_bar) * (yi - y_bar), sxx + (xi - x_bar).powi(2))
    });

    if sxx.abs() < 1e-15 {
        0.0
    } else {
        sxy / sxx
    }
}
673
/// Statistics from amplitude envelope analysis (shared by the Hilbert and
/// wavelet methods).
struct AmplitudeEnvelopeStats {
    // Coefficient of variation of the envelope (std / mean)
    modulation_score: f64,
    // Normalized linear trend of the envelope, clamped to [-1, 1]
    amplitude_trend: f64,
    // True when modulation_score exceeds the caller's threshold
    has_modulation: bool,
    // Classification derived from has_modulation and amplitude_trend
    modulation_type: ModulationType,
    // Mean envelope value (kept for completeness; currently unread)
    _mean_amp: f64,
    // Minimum envelope value over the analyzed interior
    min_amp: f64,
    // Maximum envelope value over the analyzed interior
    max_amp: f64,
}
684
/// Analyze an amplitude envelope slice to compute modulation statistics.
///
/// `interior_envelope` and `interior_times` are expected to be the same
/// length (callers pass the edge-trimmed interior of an envelope). An empty
/// slice yields NaN means via 0/0 — assumed never to happen; TODO confirm
/// callers always pass non-empty interiors.
///
/// Modulation is flagged when the coefficient of variation of the envelope
/// exceeds `modulation_threshold`; the modulation type is then classified by
/// the normalized linear trend of the envelope over time (±0.3 cutoffs).
fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    let n_interior = interior_envelope.len() as f64;

    // Basic envelope summary statistics.
    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    // Coefficient of variation = std / mean; guarded against near-zero mean.
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // OLS slope of envelope vs time.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Normalize the slope by the time span and mean amplitude so the trend is
    // a dimensionless fraction of the mean, clamped to [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    // Classify: no modulation -> Stable; otherwise the trend sign (with a
    // ±0.3 dead zone) distinguishes Emerging / Fading / Oscillating.
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
752
/// Fit a sinusoid with the given period, remove it from `residual` in place,
/// and return `(a, b, amplitude, phase)` where the fitted component is
/// `a*cos(ωt) + b*sin(ωt)` with ω = 2π/period.
fn fit_and_subtract_sinusoid(
    residual: &mut [f64],
    argvals: &[f64],
    period: f64,
) -> (f64, f64, f64, f64) {
    let m = residual.len();
    let omega = 2.0 * PI / period;

    // Project the residual onto the cosine/sine pair at this frequency.
    let (cos_sum, sin_sum) = argvals
        .iter()
        .enumerate()
        .fold((0.0, 0.0), |(c, s), (j, &t)| {
            (
                c + residual[j] * (omega * t).cos(),
                s + residual[j] * (omega * t).sin(),
            )
        });

    let a = 2.0 * cos_sum / m as f64;
    let b = 2.0 * sin_sum / m as f64;
    let amplitude = (a * a + b * b).sqrt();
    let phase = b.atan2(a);

    // Subtract the fitted component in place.
    for (j, &t) in argvals.iter().enumerate() {
        residual[j] -= a * (omega * t).cos() + b * (omega * t).sin();
    }

    (a, b, amplitude, phase)
}
780
/// Validate a single SAZED component: returns `Some(period)` when the period
/// is finite, lies strictly inside `(min_period, max_period)`, and the
/// confidence exceeds `threshold`; otherwise `None`.
fn validate_sazed_component(
    period: f64,
    confidence: f64,
    min_period: f64,
    max_period: f64,
    threshold: f64,
) -> Option<f64> {
    let in_range = period.is_finite() && period > min_period && period < max_period;
    (in_range && confidence > threshold).then_some(period)
}
795
/// Count the periods whose relative difference from `reference` is within
/// `tolerance`, returning `(count, sum_of_agreeing_periods)`.
fn count_agreeing_periods(periods: &[f64], reference: f64, tolerance: f64) -> (usize, f64) {
    periods
        .iter()
        // Relative difference is scaled by the larger of the two values.
        .filter(|&&p| (reference - p).abs() / reference.max(p) <= tolerance)
        .fold((0, 0.0), |(count, sum), &p| (count + 1, sum + p))
}
809
/// Find the lag where the initial ACF descent ends: the first negative lag,
/// or the lag just before the first uptick. Falls back to 1 when the ACF is
/// monotonically non-increasing and non-negative.
fn find_acf_descent_end(acf: &[f64]) -> usize {
    for (i, &value) in acf.iter().enumerate().skip(1) {
        if value < 0.0 {
            return i;
        }
        // An uptick can only be recognized from lag 2 onward.
        if i > 1 && value > acf[i - 1] {
            return i - 1;
        }
    }
    1
}
822
823/// Find the first ACF peak after initial descent. Returns Some((lag, acf_value)).
824fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
825    if acf.len() < 4 {
826        return None;
827    }
828
829    let min_search_start = find_acf_descent_end(acf);
830    let peaks = find_peaks_1d(&acf[min_search_start..], 1);
831    if peaks.is_empty() {
832        return None;
833    }
834
835    let peak_lag = peaks[0] + min_search_start;
836    Some((peak_lag, acf[peak_lag].max(0.0)))
837}
838
/// Compute per-cycle seasonal strengths and identify weak seasons.
///
/// Splits the time range into consecutive windows of length `period`
/// starting at `argvals[0]`, computes a seasonal strength for each window
/// via `seasonal_strength_variance` (defined elsewhere in this module), and
/// flags cycles whose strength falls below `strength_thresh`. Cycles with
/// fewer than 4 evaluation points get a NaN strength and are not flagged.
///
/// Assumes `argvals` is sorted ascending with `argvals.len() == m` — the
/// window index lookups below rely on it. TODO confirm callers guarantee this.
fn compute_cycle_strengths(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    // Only complete cycles are analyzed; a trailing partial cycle is dropped.
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // First index inside the window and first index past it (argvals sorted).
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        if cycle_m < 4 {
            // Too few points for a meaningful strength estimate.
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Extract the window as a column-major (n x cycle_m) submatrix.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[i + j * n]))
            .collect();
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        let strength =
            seasonal_strength_variance(&cycle_data, n, cycle_m, &cycle_argvals, period, 3);

        cycle_strengths.push(strength);
        if strength < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    (cycle_strengths, weak_seasons)
}
884
/// Build a fixed-width histogram over `valid` values.
/// Returns `(histogram, min_val, bin_width)`.
fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
    let (min_val, max_val) = valid
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        });
    let bin_width = (max_val - min_val) / n_bins as f64;

    let top_bin = n_bins as f64 - 1.0;
    let mut histogram = vec![0usize; n_bins];
    for &v in valid {
        // Clamp so v == max_val lands in the last bin instead of one past it.
        let bin = ((v - min_val) / bin_width).min(top_bin) as usize;
        histogram[bin] += 1;
    }
    (histogram, min_val, bin_width)
}
897
/// Find the optimal threshold bin using Otsu's between-class variance.
/// Returns `(best_bin, best_variance)`; `total` is the total sample count.
fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
    // Weighted sum of bin indices over the whole histogram.
    let sum_total: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &count)| i as f64 * count as f64)
        .sum();

    let mut best = (0usize, 0.0f64);
    let mut sum_b = 0.0;
    let mut weight_b = 0.0;

    for (t, &count) in histogram.iter().enumerate() {
        weight_b += count as f64;
        if weight_b == 0.0 {
            // Background class still empty; no valid split yet.
            continue;
        }
        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            // Foreground class exhausted; later splits are degenerate.
            break;
        }
        sum_b += t as f64 * count as f64;
        let mean_b = sum_b / weight_b;
        let mean_f = (sum_total - sum_b) / weight_f;
        let between_class = weight_b * weight_f * (mean_b - mean_f).powi(2);
        if between_class > best.1 {
            best = (t, between_class);
        }
    }

    best
}
932
/// Sum spectral power at harmonics of a fundamental frequency.
///
/// Skips the DC bin entirely; a bin counts as harmonic when its frequency
/// ratio to the fundamental is within `tolerance` of a positive integer.
/// Returns `(seasonal_power, total_power)`.
fn sum_harmonic_power(
    frequencies: &[f64],
    power: &[f64],
    fundamental_freq: f64,
    tolerance: f64,
) -> (f64, f64) {
    let mut seasonal_power = 0.0;
    let mut total_power = 0.0;

    // skip(1) drops the DC bin from both sums.
    for (&freq, &p) in frequencies.iter().zip(power.iter()).skip(1) {
        total_power += p;
        let ratio = freq / fundamental_freq;
        let nearest_harmonic = ratio.round();
        if nearest_harmonic >= 1.0 && (ratio - nearest_harmonic).abs() < tolerance {
            seasonal_power += p;
        }
    }

    (seasonal_power, total_power)
}
957
/// Return the new seasonal state if `ss` represents a valid threshold
/// crossing at index `i`, or `None` when the index should be skipped
/// (NaN strength, no state change, or too close to the previous change).
fn crossing_direction(
    ss: f64,
    threshold: f64,
    in_seasonal: bool,
    i: usize,
    last_change_idx: Option<usize>,
    min_dur_points: usize,
) -> Option<bool> {
    if ss.is_nan() {
        return None;
    }
    let now_seasonal = ss > threshold;
    let changed = now_seasonal != in_seasonal;
    let far_enough = !last_change_idx.is_some_and(|last| i - last < min_dur_points);
    (changed && far_enough).then_some(now_seasonal)
}
981
982/// Build a `ChangePoint` for a threshold crossing at index `i`.
983fn build_change_point(
984    i: usize,
985    ss: f64,
986    now_seasonal: bool,
987    strength_curve: &[f64],
988    argvals: &[f64],
989) -> ChangePoint {
990    let change_type = if now_seasonal {
991        ChangeType::Onset
992    } else {
993        ChangeType::Cessation
994    };
995    let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
996        strength_curve[i - 1]
997    } else {
998        ss
999    };
1000    ChangePoint {
1001        time: argvals[i],
1002        change_type,
1003        strength_before,
1004        strength_after: ss,
1005    }
1006}
1007
1008/// Detect threshold crossings in a strength curve, returning change points.
1009fn detect_threshold_crossings(
1010    strength_curve: &[f64],
1011    argvals: &[f64],
1012    threshold: f64,
1013    min_dur_points: usize,
1014) -> Vec<ChangePoint> {
1015    let mut change_points = Vec::new();
1016    let mut in_seasonal = strength_curve[0] > threshold;
1017    let mut last_change_idx: Option<usize> = None;
1018
1019    for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
1020        let Some(now_seasonal) = crossing_direction(
1021            ss,
1022            threshold,
1023            in_seasonal,
1024            i,
1025            last_change_idx,
1026            min_dur_points,
1027        ) else {
1028            continue;
1029        };
1030
1031        change_points.push(build_change_point(
1032            i,
1033            ss,
1034            now_seasonal,
1035            strength_curve,
1036            argvals,
1037        ));
1038
1039        in_seasonal = now_seasonal;
1040        last_change_idx = Some(i);
1041    }
1042
1043    change_points
1044}
1045
1046// ============================================================================
1047// Period Estimation
1048// ============================================================================
1049
1050/// Estimate period using FFT periodogram.
1051///
1052/// Finds the dominant frequency in the periodogram (excluding DC) and
1053/// returns the corresponding period.
1054///
1055/// # Arguments
1056/// * `data` - Column-major matrix (n x m)
1057/// * `n` - Number of samples
1058/// * `m` - Number of evaluation points
1059/// * `argvals` - Evaluation points (time values)
1060///
1061/// # Returns
1062/// Period estimate with confidence measure
1063pub fn estimate_period_fft(data: &[f64], n: usize, m: usize, argvals: &[f64]) -> PeriodEstimate {
1064    if n == 0 || m < 4 || argvals.len() != m {
1065        return PeriodEstimate {
1066            period: f64::NAN,
1067            frequency: f64::NAN,
1068            power: 0.0,
1069            confidence: 0.0,
1070        };
1071    }
1072
1073    // Compute mean curve first
1074    let mean_curve = compute_mean_curve(data, n, m);
1075
1076    let (frequencies, power) = periodogram(&mean_curve, argvals);
1077
1078    if frequencies.len() < 2 {
1079        return PeriodEstimate {
1080            period: f64::NAN,
1081            frequency: f64::NAN,
1082            power: 0.0,
1083            confidence: 0.0,
1084        };
1085    }
1086
1087    // Find peak in power spectrum (skip DC component at index 0)
1088    let mut max_power = 0.0;
1089    let mut max_idx = 1;
1090    for (i, &p) in power.iter().enumerate().skip(1) {
1091        if p > max_power {
1092            max_power = p;
1093            max_idx = i;
1094        }
1095    }
1096
1097    let dominant_freq = frequencies[max_idx];
1098    let period = if dominant_freq > 1e-15 {
1099        1.0 / dominant_freq
1100    } else {
1101        f64::INFINITY
1102    };
1103
1104    // Confidence: ratio of peak power to mean power (excluding DC)
1105    let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
1106    let confidence = if mean_power > 1e-15 {
1107        max_power / mean_power
1108    } else {
1109        0.0
1110    };
1111
1112    PeriodEstimate {
1113        period,
1114        frequency: dominant_freq,
1115        power: max_power,
1116        confidence,
1117    }
1118}
1119
1120/// Estimate period using autocorrelation function.
1121///
1122/// Finds the first significant peak in the ACF after lag 0.
1123///
1124/// # Arguments
1125/// * `data` - Column-major matrix (n x m)
1126/// * `n` - Number of samples
1127/// * `m` - Number of evaluation points
1128/// * `argvals` - Evaluation points
1129/// * `max_lag` - Maximum lag to consider (in number of points)
1130pub fn estimate_period_acf(
1131    data: &[f64],
1132    n: usize,
1133    m: usize,
1134    argvals: &[f64],
1135    max_lag: usize,
1136) -> PeriodEstimate {
1137    if n == 0 || m < 4 || argvals.len() != m {
1138        return PeriodEstimate {
1139            period: f64::NAN,
1140            frequency: f64::NAN,
1141            power: 0.0,
1142            confidence: 0.0,
1143        };
1144    }
1145
1146    // Compute mean curve
1147    let mean_curve = compute_mean_curve(data, n, m);
1148
1149    let acf = autocorrelation(&mean_curve, max_lag);
1150
1151    // Find first peak after lag 0 (skip first few lags to avoid finding lag 0)
1152    let min_lag = 2;
1153    let peaks = find_peaks_1d(&acf[min_lag..], 1);
1154
1155    if peaks.is_empty() {
1156        return PeriodEstimate {
1157            period: f64::NAN,
1158            frequency: f64::NAN,
1159            power: 0.0,
1160            confidence: 0.0,
1161        };
1162    }
1163
1164    let peak_lag = peaks[0] + min_lag;
1165    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1166    let period = peak_lag as f64 * dt;
1167    let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };
1168
1169    PeriodEstimate {
1170        period,
1171        frequency,
1172        power: acf[peak_lag],
1173        confidence: acf[peak_lag].abs(),
1174    }
1175}
1176
1177/// Estimate period via Fourier regression grid search.
1178///
1179/// Tests multiple candidate periods and selects the one that minimizes
1180/// the reconstruction error (similar to GCV).
1181///
1182/// # Arguments
1183/// * `data` - Column-major matrix (n x m)
1184/// * `n` - Number of samples
1185/// * `m` - Number of evaluation points
1186/// * `argvals` - Evaluation points
1187/// * `period_min` - Minimum period to test
1188/// * `period_max` - Maximum period to test
1189/// * `n_candidates` - Number of candidate periods to test
1190/// * `n_harmonics` - Number of Fourier harmonics to use
1191pub fn estimate_period_regression(
1192    data: &[f64],
1193    n: usize,
1194    m: usize,
1195    argvals: &[f64],
1196    period_min: f64,
1197    period_max: f64,
1198    n_candidates: usize,
1199    n_harmonics: usize,
1200) -> PeriodEstimate {
1201    if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
1202        return PeriodEstimate {
1203            period: f64::NAN,
1204            frequency: f64::NAN,
1205            power: 0.0,
1206            confidence: 0.0,
1207        };
1208    }
1209
1210    // Compute mean curve
1211    let mean_curve = compute_mean_curve(data, n, m);
1212
1213    let nbasis = 1 + 2 * n_harmonics;
1214
1215    // Grid search over candidate periods
1216    let candidates: Vec<f64> = (0..n_candidates)
1217        .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
1218        .collect();
1219
1220    let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
1221        .map(|&period| {
1222            let basis = fourier_basis_with_period(argvals, nbasis, period);
1223
1224            // Simple least squares fit
1225            let mut rss = 0.0;
1226            for j in 0..m {
1227                let mut fitted = 0.0;
1228                // Simple: use mean of basis function times data as rough fit
1229                for k in 0..nbasis {
1230                    let b_val = basis[j + k * m];
1231                    let coef: f64 = (0..m)
1232                        .map(|l| mean_curve[l] * basis[l + k * m])
1233                        .sum::<f64>()
1234                        / (0..m)
1235                            .map(|l| basis[l + k * m].powi(2))
1236                            .sum::<f64>()
1237                            .max(1e-15);
1238                    fitted += coef * b_val;
1239                }
1240                let resid = mean_curve[j] - fitted;
1241                rss += resid * resid;
1242            }
1243
1244            (period, rss)
1245        })
1246        .collect();
1247
1248    // Find period with minimum RSS
1249    let (best_period, min_rss) = results
1250        .iter()
1251        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
1252        .cloned()
1253        .unwrap_or((f64::NAN, f64::INFINITY));
1254
1255    // Confidence based on how much better the best is vs average
1256    let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
1257    let confidence = if min_rss > 1e-15 {
1258        (mean_rss / min_rss).min(10.0)
1259    } else {
1260        10.0
1261    };
1262
1263    PeriodEstimate {
1264        period: best_period,
1265        frequency: if best_period > 1e-15 {
1266            1.0 / best_period
1267        } else {
1268            0.0
1269        },
1270        power: 1.0 - min_rss / mean_rss,
1271        confidence,
1272    }
1273}
1274
1275/// Detect multiple concurrent periodicities using iterative residual subtraction.
1276///
1277/// This function iteratively:
1278/// 1. Estimates the dominant period using FFT
1279/// 2. Checks both FFT confidence and seasonal strength as stopping criteria
1280/// 3. Computes the amplitude and phase of the sinusoidal component
1281/// 4. Subtracts the fitted sinusoid from the signal
1282/// 5. Repeats on the residual until stopping criteria are met
1283///
1284/// # Arguments
1285/// * `data` - Column-major matrix (n x m)
1286/// * `n` - Number of samples
1287/// * `m` - Number of evaluation points
1288/// * `argvals` - Evaluation points
1289/// * `max_periods` - Maximum number of periods to detect
1290/// * `min_confidence` - Minimum FFT confidence to continue (default: 0.4)
1291/// * `min_strength` - Minimum seasonal strength to continue (default: 0.15)
1292///
1293/// # Returns
1294/// Vector of detected periods with their properties
1295pub fn detect_multiple_periods(
1296    data: &[f64],
1297    n: usize,
1298    m: usize,
1299    argvals: &[f64],
1300    max_periods: usize,
1301    min_confidence: f64,
1302    min_strength: f64,
1303) -> Vec<DetectedPeriod> {
1304    if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
1305        return Vec::new();
1306    }
1307
1308    // Compute mean curve
1309    let mean_curve = compute_mean_curve(data, n, m);
1310
1311    let mut residual = mean_curve.clone();
1312    let mut detected = Vec::with_capacity(max_periods);
1313
1314    for iteration in 1..=max_periods {
1315        match evaluate_next_period(
1316            &mut residual,
1317            m,
1318            argvals,
1319            min_confidence,
1320            min_strength,
1321            iteration,
1322        ) {
1323            Some(period) => detected.push(period),
1324            None => break,
1325        }
1326    }
1327
1328    detected
1329}
1330
1331/// Evaluate and extract the next dominant period from the residual signal.
1332///
1333/// Returns `None` if no significant period is found (signals iteration should stop).
1334fn evaluate_next_period(
1335    residual: &mut [f64],
1336    m: usize,
1337    argvals: &[f64],
1338    min_confidence: f64,
1339    min_strength: f64,
1340    iteration: usize,
1341) -> Option<DetectedPeriod> {
1342    let residual_data: Vec<f64> = residual.to_vec();
1343    let est = estimate_period_fft(&residual_data, 1, m, argvals);
1344
1345    if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
1346        return None;
1347    }
1348
1349    let strength = seasonal_strength_variance(&residual_data, 1, m, argvals, est.period, 3);
1350    if strength < min_strength || strength.is_nan() {
1351        return None;
1352    }
1353
1354    let (_a, _b, amplitude, phase) = fit_and_subtract_sinusoid(residual, argvals, est.period);
1355
1356    Some(DetectedPeriod {
1357        period: est.period,
1358        confidence: est.confidence,
1359        strength,
1360        amplitude,
1361        phase,
1362        iteration,
1363    })
1364}
1365
1366// ============================================================================
1367// Peak Detection
1368// ============================================================================
1369
1370/// Optionally smooth data using Fourier basis before peak detection.
1371fn smooth_for_peaks(
1372    data: &[f64],
1373    n: usize,
1374    m: usize,
1375    argvals: &[f64],
1376    smooth_first: bool,
1377    smooth_nbasis: Option<usize>,
1378) -> Vec<f64> {
1379    if !smooth_first {
1380        return data.to_vec();
1381    }
1382    let nbasis = smooth_nbasis
1383        .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, n, m, argvals, 5, 25));
1384    if let Some(result) = crate::basis::fourier_fit_1d(data, n, m, argvals, nbasis) {
1385        result.fitted
1386    } else {
1387        data.to_vec()
1388    }
1389}
1390
/// Detect peaks in a single curve using derivative zero-crossings.
///
/// A peak is a point where the first derivative changes sign from positive
/// to non-positive. Peaks closer than `min_dist_points` indices to the
/// previously accepted peak are dropped, and peaks whose normalized
/// prominence falls below `min_prominence` (when given) are filtered out.
///
/// # Arguments
/// * `curve` - Function values for one sample
/// * `d1` - First-derivative values aligned with `curve`
/// * `argvals` - Evaluation points (same length as `curve`)
/// * `min_dist_points` - Minimum index separation between accepted peaks
/// * `min_prominence` - Optional prominence filter (0-1 normalized scale)
/// * `data_range` - Global data range used to normalize prominence
///
/// # Returns
/// The detected peaks and the time distances between consecutive peaks.
fn detect_peaks_single_curve(
    curve: &[f64],
    d1: &[f64],
    argvals: &[f64],
    min_dist_points: usize,
    min_prominence: Option<f64>,
    data_range: f64,
) -> (Vec<Peak>, Vec<f64>) {
    let m = curve.len();
    let mut peak_indices = Vec::new();
    for j in 1..m {
        // Sign change of the derivative: rising before j, non-rising at j.
        if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
            // Attribute the peak to j-1 for a genuine crossing; when the two
            // derivative values are numerically equal, use j instead.
            let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
                j - 1
            } else {
                j
            };

            // Enforce the minimum index distance to the last accepted peak.
            if peak_indices.is_empty()
                || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
            {
                peak_indices.push(idx);
            }
        }
    }

    // Convert indices into Peak records with prominence normalized by the
    // global data range.
    let mut peaks: Vec<Peak> = peak_indices
        .iter()
        .map(|&idx| {
            let prominence = compute_prominence(curve, idx) / data_range;
            Peak {
                time: argvals[idx],
                value: curve[idx],
                prominence,
            }
        })
        .collect();

    // Optional prominence filter (applied after the distance filter).
    if let Some(min_prom) = min_prominence {
        peaks.retain(|p| p.prominence >= min_prom);
    }

    // Inter-peak time distances, used upstream for period estimation.
    let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();

    (peaks, distances)
}
1438
1439/// Detect peaks in functional data.
1440///
1441/// Uses derivative zero-crossings to find local maxima, with optional
1442/// Fourier basis smoothing and filtering by minimum distance and prominence.
1443///
1444/// # Arguments
1445/// * `data` - Column-major matrix (n x m)
1446/// * `n` - Number of samples
1447/// * `m` - Number of evaluation points
1448/// * `argvals` - Evaluation points
1449/// * `min_distance` - Minimum time between peaks (None = no constraint)
1450/// * `min_prominence` - Minimum prominence (0-1 scale, None = no filter)
1451/// * `smooth_first` - Whether to smooth data before peak detection using Fourier basis
1452/// * `smooth_nbasis` - Number of Fourier basis functions. If None and smooth_first=true,
1453///   uses GCV to automatically select optimal nbasis (range 5-25).
1454pub fn detect_peaks(
1455    data: &[f64],
1456    n: usize,
1457    m: usize,
1458    argvals: &[f64],
1459    min_distance: Option<f64>,
1460    min_prominence: Option<f64>,
1461    smooth_first: bool,
1462    smooth_nbasis: Option<usize>,
1463) -> PeakDetectionResult {
1464    if n == 0 || m < 3 || argvals.len() != m {
1465        return PeakDetectionResult {
1466            peaks: Vec::new(),
1467            inter_peak_distances: Vec::new(),
1468            mean_period: f64::NAN,
1469        };
1470    }
1471
1472    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1473    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);
1474
1475    let work_data = smooth_for_peaks(data, n, m, argvals, smooth_first, smooth_nbasis);
1476
1477    // Compute first derivative
1478    let deriv1 = deriv_1d(&work_data, n, m, argvals, 1);
1479
1480    // Compute data range for prominence normalization
1481    let data_range: f64 = {
1482        let mut min_val = f64::INFINITY;
1483        let mut max_val = f64::NEG_INFINITY;
1484        for &v in work_data.iter() {
1485            min_val = min_val.min(v);
1486            max_val = max_val.max(v);
1487        }
1488        (max_val - min_val).max(1e-15)
1489    };
1490
1491    // Find peaks for each sample
1492    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
1493        .map(|i| {
1494            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
1495            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();
1496            detect_peaks_single_curve(
1497                &curve,
1498                &d1,
1499                argvals,
1500                min_dist_points,
1501                min_prominence,
1502                data_range,
1503            )
1504        })
1505        .collect();
1506
1507    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
1508    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();
1509
1510    // Compute mean period from all inter-peak distances
1511    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
1512    let mean_period = if all_distances.is_empty() {
1513        f64::NAN
1514    } else {
1515        all_distances.iter().sum::<f64>() / all_distances.len() as f64
1516    };
1517
1518    PeakDetectionResult {
1519        peaks,
1520        inter_peak_distances,
1521        mean_period,
1522    }
1523}
1524
1525// ============================================================================
1526// Seasonal Strength
1527// ============================================================================
1528
1529/// Measure seasonal strength using variance decomposition.
1530///
1531/// Computes SS = Var(seasonal_component) / Var(total) where the seasonal
1532/// component is extracted using Fourier basis.
1533///
1534/// # Arguments
1535/// * `data` - Column-major matrix (n x m)
1536/// * `n` - Number of samples
1537/// * `m` - Number of evaluation points
1538/// * `argvals` - Evaluation points
1539/// * `period` - Seasonal period
1540/// * `n_harmonics` - Number of Fourier harmonics to use
1541///
1542/// # Returns
1543/// Seasonal strength in [0, 1]
1544pub fn seasonal_strength_variance(
1545    data: &[f64],
1546    n: usize,
1547    m: usize,
1548    argvals: &[f64],
1549    period: f64,
1550    n_harmonics: usize,
1551) -> f64 {
1552    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1553        return f64::NAN;
1554    }
1555
1556    // Compute mean curve
1557    let mean_curve = compute_mean_curve(data, n, m);
1558
1559    // Total variance
1560    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1561    let total_var: f64 = mean_curve
1562        .iter()
1563        .map(|&x| (x - global_mean).powi(2))
1564        .sum::<f64>()
1565        / m as f64;
1566
1567    if total_var < 1e-15 {
1568        return 0.0;
1569    }
1570
1571    // Fit Fourier basis to extract seasonal component
1572    let nbasis = 1 + 2 * n_harmonics;
1573    let basis = fourier_basis_with_period(argvals, nbasis, period);
1574
1575    // Project data onto basis (simple least squares for mean curve)
1576    let mut seasonal = vec![0.0; m];
1577    for k in 1..nbasis {
1578        // Skip DC component
1579        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
1580        if b_sum > 1e-15 {
1581            let coef: f64 = (0..m)
1582                .map(|j| mean_curve[j] * basis[j + k * m])
1583                .sum::<f64>()
1584                / b_sum;
1585            for j in 0..m {
1586                seasonal[j] += coef * basis[j + k * m];
1587            }
1588        }
1589    }
1590
1591    // Seasonal variance
1592    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
1593    let seasonal_var: f64 = seasonal
1594        .iter()
1595        .map(|&x| (x - seasonal_mean).powi(2))
1596        .sum::<f64>()
1597        / m as f64;
1598
1599    (seasonal_var / total_var).min(1.0)
1600}
1601
1602/// Measure seasonal strength using spectral method.
1603///
1604/// Computes SS = power at seasonal frequencies / total power.
1605///
1606/// # Arguments
1607/// * `data` - Column-major matrix (n x m)
1608/// * `n` - Number of samples
1609/// * `m` - Number of evaluation points
1610/// * `argvals` - Evaluation points
1611/// * `period` - Seasonal period
1612pub fn seasonal_strength_spectral(
1613    data: &[f64],
1614    n: usize,
1615    m: usize,
1616    argvals: &[f64],
1617    period: f64,
1618) -> f64 {
1619    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1620        return f64::NAN;
1621    }
1622
1623    // Compute mean curve
1624    let mean_curve = compute_mean_curve(data, n, m);
1625
1626    let (frequencies, power) = periodogram(&mean_curve, argvals);
1627
1628    if frequencies.len() < 2 {
1629        return f64::NAN;
1630    }
1631
1632    let fundamental_freq = 1.0 / period;
1633    let (seasonal_power, total_power) =
1634        sum_harmonic_power(&frequencies, &power, fundamental_freq, 0.1);
1635
1636    if total_power < 1e-15 {
1637        return 0.0;
1638    }
1639
1640    (seasonal_power / total_power).min(1.0)
1641}
1642
1643/// Compute seasonal strength using Morlet wavelet power at the target period.
1644///
1645/// This method uses the Continuous Wavelet Transform (CWT) with a Morlet wavelet
1646/// to measure power at the specified seasonal period. Unlike spectral methods,
1647/// wavelets provide time-localized frequency information.
1648///
1649/// # Arguments
1650/// * `data` - Column-major matrix (n x m)
1651/// * `n` - Number of samples
1652/// * `m` - Number of evaluation points
1653/// * `argvals` - Evaluation points
1654/// * `period` - Seasonal period in argvals units
1655///
1656/// # Returns
1657/// Seasonal strength as ratio of wavelet power to total variance (0 to 1)
1658///
1659/// # Notes
1660/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
1661/// - Scale is computed as: scale = period * ω₀ / (2π)
1662/// - Strength is computed over the interior 80% of the signal to avoid edge effects
1663pub fn seasonal_strength_wavelet(
1664    data: &[f64],
1665    n: usize,
1666    m: usize,
1667    argvals: &[f64],
1668    period: f64,
1669) -> f64 {
1670    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1671        return f64::NAN;
1672    }
1673
1674    // Compute mean curve
1675    let mean_curve = compute_mean_curve(data, n, m);
1676
1677    // Remove DC component
1678    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1679    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1680
1681    // Compute total variance
1682    let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;
1683
1684    if total_variance < 1e-15 {
1685        return 0.0;
1686    }
1687
1688    // Compute wavelet transform at the seasonal scale
1689    let omega0 = 6.0;
1690    let scale = period * omega0 / (2.0 * PI);
1691    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1692
1693    if wavelet_coeffs.is_empty() {
1694        return f64::NAN;
1695    }
1696
1697    // Compute wavelet power, skipping edges (10% on each side)
1698    let (interior_start, interior_end) = match interior_bounds(m) {
1699        Some(bounds) => bounds,
1700        None => return f64::NAN,
1701    };
1702
1703    let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
1704        .iter()
1705        .map(|c| c.norm_sqr())
1706        .sum::<f64>()
1707        / (interior_end - interior_start) as f64;
1708
1709    // Return ratio of wavelet power to total variance
1710    // Normalize so that a pure sine at the target period gives ~1.0
1711    (wavelet_power / total_variance).sqrt().min(1.0)
1712}
1713
1714/// Compute time-varying seasonal strength using sliding windows.
1715///
1716/// # Arguments
1717/// * `data` - Column-major matrix (n x m)
1718/// * `n` - Number of samples
1719/// * `m` - Number of evaluation points
1720/// * `argvals` - Evaluation points
1721/// * `period` - Seasonal period
1722/// * `window_size` - Window width (recommended: 2 * period)
1723/// * `method` - Method for computing strength (Variance or Spectral)
1724///
1725/// # Returns
1726/// Seasonal strength at each time point
1727pub fn seasonal_strength_windowed(
1728    data: &[f64],
1729    n: usize,
1730    m: usize,
1731    argvals: &[f64],
1732    period: f64,
1733    window_size: f64,
1734    method: StrengthMethod,
1735) -> Vec<f64> {
1736    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
1737        return Vec::new();
1738    }
1739
1740    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1741    let half_window_points = ((window_size / 2.0) / dt).round() as usize;
1742
1743    // Compute mean curve
1744    let mean_curve = compute_mean_curve(data, n, m);
1745
1746    iter_maybe_parallel!(0..m)
1747        .map(|center| {
1748            let start = center.saturating_sub(half_window_points);
1749            let end = (center + half_window_points + 1).min(m);
1750            let window_m = end - start;
1751
1752            if window_m < 4 {
1753                return f64::NAN;
1754            }
1755
1756            let window_data: Vec<f64> = mean_curve[start..end].to_vec();
1757            let window_argvals: Vec<f64> = argvals[start..end].to_vec();
1758
1759            // Create single-sample data for the strength functions
1760            let single_data = window_data.clone();
1761
1762            match method {
1763                StrengthMethod::Variance => seasonal_strength_variance(
1764                    &single_data,
1765                    1,
1766                    window_m,
1767                    &window_argvals,
1768                    period,
1769                    3,
1770                ),
1771                StrengthMethod::Spectral => {
1772                    seasonal_strength_spectral(&single_data, 1, window_m, &window_argvals, period)
1773                }
1774            }
1775        })
1776        .collect()
1777}
1778
1779// ============================================================================
1780// Seasonality Change Detection
1781// ============================================================================
1782
1783/// Detect changes in seasonality.
1784///
1785/// Monitors time-varying seasonal strength and detects threshold crossings
1786/// that indicate onset or cessation of seasonality.
1787///
1788/// # Arguments
1789/// * `data` - Column-major matrix (n x m)
1790/// * `n` - Number of samples
1791/// * `m` - Number of evaluation points
1792/// * `argvals` - Evaluation points
1793/// * `period` - Seasonal period
1794/// * `threshold` - SS threshold for seasonal/non-seasonal (e.g., 0.3)
1795/// * `window_size` - Window size for local strength estimation
1796/// * `min_duration` - Minimum duration to confirm a change
1797pub fn detect_seasonality_changes(
1798    data: &[f64],
1799    n: usize,
1800    m: usize,
1801    argvals: &[f64],
1802    period: f64,
1803    threshold: f64,
1804    window_size: f64,
1805    min_duration: f64,
1806) -> ChangeDetectionResult {
1807    if n == 0 || m < 4 || argvals.len() != m {
1808        return ChangeDetectionResult {
1809            change_points: Vec::new(),
1810            strength_curve: Vec::new(),
1811        };
1812    }
1813
1814    // Compute time-varying seasonal strength
1815    let strength_curve = seasonal_strength_windowed(
1816        data,
1817        n,
1818        m,
1819        argvals,
1820        period,
1821        window_size,
1822        StrengthMethod::Variance,
1823    );
1824
1825    if strength_curve.is_empty() {
1826        return ChangeDetectionResult {
1827            change_points: Vec::new(),
1828            strength_curve: Vec::new(),
1829        };
1830    }
1831
1832    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1833    let min_dur_points = (min_duration / dt).round() as usize;
1834
1835    let change_points =
1836        detect_threshold_crossings(&strength_curve, argvals, threshold, min_dur_points);
1837
1838    ChangeDetectionResult {
1839        change_points,
1840        strength_curve,
1841    }
1842}
1843
1844// ============================================================================
1845// Amplitude Modulation Detection
1846// ============================================================================
1847
1848/// Detect amplitude modulation in seasonal time series.
1849///
1850/// This function first checks if seasonality exists using the spectral method
1851/// (which is robust to amplitude modulation), then uses Hilbert transform to
1852/// extract the amplitude envelope and analyze modulation patterns.
1853///
1854/// # Arguments
1855/// * `data` - Column-major matrix (n x m)
1856/// * `n` - Number of samples
1857/// * `m` - Number of evaluation points
1858/// * `argvals` - Evaluation points
1859/// * `period` - Seasonal period in argvals units
1860/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
1861/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
1862///
1863/// # Returns
1864/// `AmplitudeModulationResult` containing detection results and diagnostics
1865///
1866/// # Example
1867/// ```ignore
1868/// let result = detect_amplitude_modulation(
1869///     &data, n, m, &argvals,
1870///     period,
1871///     0.15,          // CV > 0.15 indicates modulation
1872///     0.3,           // strength > 0.3 indicates seasonality
1873/// );
1874/// if result.has_modulation {
1875///     match result.modulation_type {
1876///         ModulationType::Emerging => println!("Seasonality is emerging"),
1877///         ModulationType::Fading => println!("Seasonality is fading"),
1878///         _ => {}
1879///     }
1880/// }
1881/// ```
pub fn detect_amplitude_modulation(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> AmplitudeModulationResult {
    // Default result for invalid input
    let empty_result = AmplitudeModulationResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        strength_curve: Vec::new(),
        time_points: Vec::new(),
        min_strength: 0.0,
        max_strength: 0.0,
    };

    // Guard: need at least one sample, >= 4 evaluation points, a matching
    // grid, and a positive period.
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Step 1: Check if seasonality exists using spectral method (robust to AM)
    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);

    // Below the seasonality threshold there is nothing to modulate; report
    // the measured strength and stop.
    if overall_strength < seasonality_threshold {
        return AmplitudeModulationResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Step 2: Compute mean curve
    let mean_curve = compute_mean_curve(data, n, m);

    // Step 3: Use Hilbert transform to get amplitude envelope.
    // Note: "detrended" here only removes the DC offset (mean), not a linear
    // trend. The magnitude of the analytic signal is the amplitude envelope.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
    let analytic = hilbert_transform(&detrended);
    let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();

    // Defensive: treat an empty transform result as "seasonal but stable".
    if envelope.is_empty() {
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: Vec::new(),
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    }

    // Step 4: Smooth the envelope to reduce high-frequency noise.
    // dt is derived from the endpoints only, so it assumes an (approximately)
    // uniform grid — TODO confirm callers always pass evenly spaced argvals.
    // The moving-average window spans roughly one period (at least 3 points)
    // so within-cycle oscillation is averaged out of the envelope.
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let smooth_window = ((period / dt) as usize).max(3);
    let half_window = smooth_window / 2;

    let smoothed_envelope: Vec<f64> = (0..m)
        .map(|i| {
            // Window is clipped at both ends; the divisor shrinks accordingly.
            let start = i.saturating_sub(half_window);
            let end = (i + half_window + 1).min(m);
            let sum: f64 = envelope[start..end].iter().sum();
            sum / (end - start) as f64
        })
        .collect();

    // Step 5: Analyze envelope statistics.
    // valid_interior_bounds presumably trims edge points distorted by the
    // transform/smoothing — verify against its definition.
    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
        return AmplitudeModulationResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            strength_curve: envelope,
            time_points: argvals.to_vec(),
            min_strength: 0.0,
            max_strength: 0.0,
        };
    };

    let stats = analyze_amplitude_envelope(
        &smoothed_envelope[interior_start..interior_end],
        &argvals[interior_start..interior_end],
        modulation_threshold,
    );

    // NOTE(review): the returned strength_curve is the raw (unsmoothed)
    // envelope, while the modulation statistics are computed from the
    // smoothed, edge-trimmed envelope — confirm this asymmetry is intended.
    AmplitudeModulationResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation: stats.has_modulation,
        modulation_type: stats.modulation_type,
        modulation_score: stats.modulation_score,
        amplitude_trend: stats.amplitude_trend,
        strength_curve: envelope,
        time_points: argvals.to_vec(),
        min_strength: stats.min_amp,
        max_strength: stats.max_amp,
    }
}
2000
/// Detect amplitude modulation using Morlet wavelet transform.
///
/// Uses continuous wavelet transform at the seasonal period to extract
/// time-varying amplitude. This method is more robust to noise and can
/// better handle non-stationary signals compared to Hilbert transform.
///
/// # Arguments
/// * `data` - Column-major matrix (n x m)
/// * `n` - Number of samples
/// * `m` - Number of evaluation points
/// * `argvals` - Evaluation points
/// * `period` - Seasonal period in argvals units
/// * `modulation_threshold` - CV threshold for detecting modulation (default: 0.15)
/// * `seasonality_threshold` - Strength threshold for seasonality (default: 0.3)
///
/// # Returns
/// `WaveletAmplitudeResult` containing detection results and wavelet amplitude curve
///
/// # Notes
/// - Uses Morlet wavelet with ω₀ = 6 (standard choice)
/// - The scale parameter is derived from the period: scale = period * ω₀ / (2π)
/// - This relates to how wavelets measure period: for Morlet, period ≈ scale * 2π / ω₀
pub fn detect_amplitude_modulation_wavelet(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    modulation_threshold: f64,
    seasonality_threshold: f64,
) -> WaveletAmplitudeResult {
    // Default result for invalid input.
    let empty_result = WaveletAmplitudeResult {
        is_seasonal: false,
        seasonal_strength: 0.0,
        has_modulation: false,
        modulation_type: ModulationType::NonSeasonal,
        modulation_score: 0.0,
        amplitude_trend: 0.0,
        wavelet_amplitude: Vec::new(),
        time_points: Vec::new(),
        scale: 0.0,
    };

    // Guard: need at least one sample, >= 4 evaluation points, a matching
    // grid, and a positive period.
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return empty_result;
    }

    // Step 1: Check if seasonality exists using spectral method
    let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);

    // Below the seasonality threshold there is nothing to modulate.
    if overall_strength < seasonality_threshold {
        return WaveletAmplitudeResult {
            is_seasonal: false,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::NonSeasonal,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale: 0.0,
        };
    }

    // Step 2: Compute mean curve
    let mean_curve = compute_mean_curve(data, n, m);

    // Remove DC component. Note: only the mean is removed here, not a
    // linear trend.
    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();

    // Step 3: Compute wavelet transform at the seasonal period
    // For Morlet wavelet: period = scale * 2π / ω₀, so scale = period * ω₀ / (2π)
    let omega0 = 6.0; // Standard Morlet parameter
    let scale = period * omega0 / (2.0 * PI);

    // Use FFT-based CWT for efficiency
    let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);

    // Defensive: treat an empty transform result as "seasonal but stable".
    if wavelet_coeffs.is_empty() {
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude: Vec::new(),
            time_points: argvals.to_vec(),
            scale,
        };
    }

    // Step 4: Extract amplitude (magnitude of wavelet coefficients)
    let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();

    // Step 5: Analyze amplitude envelope statistics (skip edges).
    // valid_interior_bounds presumably trims edge points affected by the
    // wavelet's cone of influence — verify against its definition.
    let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
        return WaveletAmplitudeResult {
            is_seasonal: true,
            seasonal_strength: overall_strength,
            has_modulation: false,
            modulation_type: ModulationType::Stable,
            modulation_score: 0.0,
            amplitude_trend: 0.0,
            wavelet_amplitude,
            time_points: argvals.to_vec(),
            scale,
        };
    };

    let stats = analyze_amplitude_envelope(
        &wavelet_amplitude[interior_start..interior_end],
        &argvals[interior_start..interior_end],
        modulation_threshold,
    );

    // The full (untrimmed) amplitude curve is returned; the statistics come
    // from the edge-trimmed interior slice above.
    WaveletAmplitudeResult {
        is_seasonal: true,
        seasonal_strength: overall_strength,
        has_modulation: stats.has_modulation,
        modulation_type: stats.modulation_type,
        modulation_score: stats.modulation_score,
        amplitude_trend: stats.amplitude_trend,
        wavelet_amplitude,
        time_points: argvals.to_vec(),
        scale,
    }
}
2130
2131// ============================================================================
2132// Instantaneous Period
2133// ============================================================================
2134
2135/// Estimate instantaneous period using Hilbert transform.
2136///
2137/// For series with drifting/changing period, this computes the period
2138/// at each time point using the analytic signal.
2139///
2140/// # Arguments
2141/// * `data` - Column-major matrix (n x m)
2142/// * `n` - Number of samples
2143/// * `m` - Number of evaluation points
2144/// * `argvals` - Evaluation points
2145pub fn instantaneous_period(
2146    data: &[f64],
2147    n: usize,
2148    m: usize,
2149    argvals: &[f64],
2150) -> InstantaneousPeriod {
2151    if n == 0 || m < 4 || argvals.len() != m {
2152        return InstantaneousPeriod {
2153            period: Vec::new(),
2154            frequency: Vec::new(),
2155            amplitude: Vec::new(),
2156        };
2157    }
2158
2159    // Compute mean curve
2160    let mean_curve = compute_mean_curve(data, n, m);
2161
2162    // Remove DC component (detrend by subtracting mean)
2163    let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
2164    let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
2165
2166    // Compute analytic signal via Hilbert transform
2167    let analytic = hilbert_transform(&detrended);
2168
2169    // Extract instantaneous amplitude and phase
2170    let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
2171
2172    let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();
2173
2174    // Unwrap phase
2175    let unwrapped_phase = unwrap_phase(&phase);
2176
2177    // Compute instantaneous frequency (derivative of phase)
2178    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
2179    let mut inst_freq = vec![0.0; m];
2180
2181    // Central differences for interior, forward/backward at boundaries
2182    if m > 1 {
2183        inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
2184    }
2185    for j in 1..(m - 1) {
2186        inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
2187    }
2188    if m > 1 {
2189        inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
2190    }
2191
2192    // Period = 1/frequency (handle near-zero frequencies)
2193    let period: Vec<f64> = inst_freq
2194        .iter()
2195        .map(|&f| {
2196            if f.abs() > 1e-10 {
2197                (1.0 / f).abs()
2198            } else {
2199                f64::INFINITY
2200            }
2201        })
2202        .collect();
2203
2204    InstantaneousPeriod {
2205        period,
2206        frequency: inst_freq,
2207        amplitude,
2208    }
2209}
2210
2211// ============================================================================
2212// Peak Timing Variability Analysis
2213// ============================================================================
2214
2215/// Analyze peak timing variability across cycles.
2216///
2217/// For short series (e.g., 3-5 years of yearly data), this function detects
2218/// one peak per cycle and analyzes how peak timing varies between cycles.
2219///
2220/// # Arguments
2221/// * `data` - Column-major matrix (n x m)
2222/// * `n` - Number of samples
2223/// * `m` - Number of evaluation points
2224/// * `argvals` - Evaluation points
2225/// * `period` - Known period (e.g., 365 for daily data with yearly seasonality)
2226/// * `smooth_nbasis` - Number of Fourier basis functions for smoothing.
2227///   If None, uses GCV for automatic selection.
2228///
2229/// # Returns
2230/// Peak timing result with variability metrics
2231pub fn analyze_peak_timing(
2232    data: &[f64],
2233    n: usize,
2234    m: usize,
2235    argvals: &[f64],
2236    period: f64,
2237    smooth_nbasis: Option<usize>,
2238) -> PeakTimingResult {
2239    if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
2240        return PeakTimingResult {
2241            peak_times: Vec::new(),
2242            peak_values: Vec::new(),
2243            normalized_timing: Vec::new(),
2244            mean_timing: f64::NAN,
2245            std_timing: f64::NAN,
2246            range_timing: f64::NAN,
2247            variability_score: f64::NAN,
2248            timing_trend: f64::NAN,
2249            cycle_indices: Vec::new(),
2250        };
2251    }
2252
2253    // Detect peaks with minimum distance constraint of 0.7 * period
2254    // This ensures we get at most one peak per cycle
2255    let min_distance = period * 0.7;
2256    let peaks = detect_peaks(
2257        data,
2258        n,
2259        m,
2260        argvals,
2261        Some(min_distance),
2262        None, // No prominence filter
2263        true, // Smooth first with Fourier basis
2264        smooth_nbasis,
2265    );
2266
2267    // Use the first sample's peaks (for mean curve analysis)
2268    // If multiple samples, we take the mean curve which is effectively in sample 0
2269    let sample_peaks = if peaks.peaks.is_empty() {
2270        Vec::new()
2271    } else {
2272        peaks.peaks[0].clone()
2273    };
2274
2275    if sample_peaks.is_empty() {
2276        return PeakTimingResult {
2277            peak_times: Vec::new(),
2278            peak_values: Vec::new(),
2279            normalized_timing: Vec::new(),
2280            mean_timing: f64::NAN,
2281            std_timing: f64::NAN,
2282            range_timing: f64::NAN,
2283            variability_score: f64::NAN,
2284            timing_trend: f64::NAN,
2285            cycle_indices: Vec::new(),
2286        };
2287    }
2288
2289    let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
2290    let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();
2291
2292    // Compute normalized timing (position within cycle, 0-1 scale)
2293    let t_start = argvals[0];
2294    let normalized_timing: Vec<f64> = peak_times
2295        .iter()
2296        .map(|&t| {
2297            let cycle_pos = (t - t_start) % period;
2298            cycle_pos / period
2299        })
2300        .collect();
2301
2302    // Compute cycle indices (1-indexed)
2303    let cycle_indices: Vec<usize> = peak_times
2304        .iter()
2305        .map(|&t| ((t - t_start) / period).floor() as usize + 1)
2306        .collect();
2307
2308    // Compute statistics
2309    let n_peaks = normalized_timing.len() as f64;
2310    let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;
2311
2312    let variance: f64 = normalized_timing
2313        .iter()
2314        .map(|&x| (x - mean_timing).powi(2))
2315        .sum::<f64>()
2316        / n_peaks;
2317    let std_timing = variance.sqrt();
2318
2319    let min_timing = normalized_timing
2320        .iter()
2321        .cloned()
2322        .fold(f64::INFINITY, f64::min);
2323    let max_timing = normalized_timing
2324        .iter()
2325        .cloned()
2326        .fold(f64::NEG_INFINITY, f64::max);
2327    let range_timing = max_timing - min_timing;
2328
2329    // Variability score: normalized std deviation
2330    // Max possible std for uniform in [0,1] is ~0.289, so we scale by that
2331    // But since peaks cluster, we use 0.1 as "high" variability threshold
2332    let variability_score = (std_timing / 0.1).min(1.0);
2333
2334    // Timing trend: linear regression of normalized timing on cycle index
2335    let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
2336    let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);
2337
2338    PeakTimingResult {
2339        peak_times,
2340        peak_values,
2341        normalized_timing,
2342        mean_timing,
2343        std_timing,
2344        range_timing,
2345        variability_score,
2346        timing_trend,
2347        cycle_indices,
2348    }
2349}
2350
2351// ============================================================================
2352// Seasonality Classification
2353// ============================================================================
2354
2355/// Classify the type of seasonality in functional data.
2356///
2357/// This is particularly useful for short series (3-5 years) where you need
2358/// to identify:
2359/// - Whether seasonality is present
2360/// - Whether peak timing is stable or variable
2361/// - Which cycles have weak or missing seasonality
2362///
2363/// # Arguments
2364/// * `data` - Column-major matrix (n x m)
2365/// * `n` - Number of samples
2366/// * `m` - Number of evaluation points
2367/// * `argvals` - Evaluation points
2368/// * `period` - Known seasonal period
2369/// * `strength_threshold` - Threshold for seasonal/non-seasonal (default: 0.3)
2370/// * `timing_threshold` - Max std of normalized timing for "stable" (default: 0.05)
2371///
2372/// # Returns
2373/// Seasonality classification with type and diagnostics
2374pub fn classify_seasonality(
2375    data: &[f64],
2376    n: usize,
2377    m: usize,
2378    argvals: &[f64],
2379    period: f64,
2380    strength_threshold: Option<f64>,
2381    timing_threshold: Option<f64>,
2382) -> SeasonalityClassification {
2383    let strength_thresh = strength_threshold.unwrap_or(0.3);
2384    let timing_thresh = timing_threshold.unwrap_or(0.05);
2385
2386    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
2387        return SeasonalityClassification {
2388            is_seasonal: false,
2389            has_stable_timing: false,
2390            timing_variability: f64::NAN,
2391            seasonal_strength: f64::NAN,
2392            cycle_strengths: Vec::new(),
2393            weak_seasons: Vec::new(),
2394            classification: SeasonalType::NonSeasonal,
2395            peak_timing: None,
2396        };
2397    }
2398
2399    // Compute overall seasonal strength
2400    let overall_strength = seasonal_strength_variance(data, n, m, argvals, period, 3);
2401
2402    let (cycle_strengths, weak_seasons) =
2403        compute_cycle_strengths(data, n, m, argvals, period, strength_thresh);
2404    let n_cycles = cycle_strengths.len();
2405
2406    // Analyze peak timing
2407    let peak_timing = analyze_peak_timing(data, n, m, argvals, period, None);
2408
2409    // Determine classification
2410    let is_seasonal = overall_strength >= strength_thresh;
2411    let has_stable_timing = peak_timing.std_timing <= timing_thresh;
2412    let timing_variability = peak_timing.variability_score;
2413
2414    // Classify based on patterns
2415    let n_weak = weak_seasons.len();
2416    let classification = if !is_seasonal {
2417        SeasonalType::NonSeasonal
2418    } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
2419        // More than 30% of cycles are weak
2420        SeasonalType::IntermittentSeasonal
2421    } else if !has_stable_timing {
2422        SeasonalType::VariableTiming
2423    } else {
2424        SeasonalType::StableSeasonal
2425    };
2426
2427    SeasonalityClassification {
2428        is_seasonal,
2429        has_stable_timing,
2430        timing_variability,
2431        seasonal_strength: overall_strength,
2432        cycle_strengths,
2433        weak_seasons,
2434        classification,
2435        peak_timing: Some(peak_timing),
2436    }
2437}
2438
2439/// Detect seasonality changes with automatic threshold selection.
2440///
2441/// Uses Otsu's method or percentile-based threshold instead of a fixed value.
2442///
2443/// # Arguments
2444/// * `data` - Column-major matrix (n x m)
2445/// * `n` - Number of samples
2446/// * `m` - Number of evaluation points
2447/// * `argvals` - Evaluation points
2448/// * `period` - Seasonal period
2449/// * `threshold_method` - Method for threshold selection
2450/// * `window_size` - Window size for local strength estimation
2451/// * `min_duration` - Minimum duration to confirm a change
2452pub fn detect_seasonality_changes_auto(
2453    data: &[f64],
2454    n: usize,
2455    m: usize,
2456    argvals: &[f64],
2457    period: f64,
2458    threshold_method: ThresholdMethod,
2459    window_size: f64,
2460    min_duration: f64,
2461) -> ChangeDetectionResult {
2462    if n == 0 || m < 4 || argvals.len() != m {
2463        return ChangeDetectionResult {
2464            change_points: Vec::new(),
2465            strength_curve: Vec::new(),
2466        };
2467    }
2468
2469    // Compute time-varying seasonal strength
2470    let strength_curve = seasonal_strength_windowed(
2471        data,
2472        n,
2473        m,
2474        argvals,
2475        period,
2476        window_size,
2477        StrengthMethod::Variance,
2478    );
2479
2480    if strength_curve.is_empty() {
2481        return ChangeDetectionResult {
2482            change_points: Vec::new(),
2483            strength_curve: Vec::new(),
2484        };
2485    }
2486
2487    // Determine threshold
2488    let threshold = match threshold_method {
2489        ThresholdMethod::Fixed(t) => t,
2490        ThresholdMethod::Percentile(p) => {
2491            let mut sorted: Vec<f64> = strength_curve
2492                .iter()
2493                .copied()
2494                .filter(|x| x.is_finite())
2495                .collect();
2496            sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2497            if sorted.is_empty() {
2498                0.5
2499            } else {
2500                let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2501                sorted[idx.min(sorted.len() - 1)]
2502            }
2503        }
2504        ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2505    };
2506
2507    // Now use the regular detection with computed threshold
2508    detect_seasonality_changes(
2509        data,
2510        n,
2511        m,
2512        argvals,
2513        period,
2514        threshold,
2515        window_size,
2516        min_duration,
2517    )
2518}
2519
/// Result of SAZED ensemble period detection.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Primary detected period (consensus from ensemble).
    /// NaN when the input is too short or too few components were confident.
    pub period: f64,
    /// Confidence score (0-1, based on component agreement; 0 when no period
    /// was reported)
    pub confidence: f64,
    /// Periods detected by each component (may be NaN if not detected)
    pub component_periods: SazedComponents,
    /// Number of components that agreed on the final period
    pub agreeing_components: usize,
}
2532
/// Individual period estimates from each SAZED component.
///
/// Any field may be NaN when that component failed to detect a period.
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Period from spectral (FFT) detection
    pub spectral: f64,
    /// Period from ACF peak detection
    pub acf_peak: f64,
    /// Period from weighted ACF average
    pub acf_average: f64,
    /// Period from ACF zero-crossing analysis
    pub zero_crossing: f64,
    /// Period from spectral differencing
    pub spectral_diff: f64,
}
2547
/// SAZED: Spectral-ACF Zero-crossing Ensemble Detection
///
/// A parameter-free ensemble method for robust period detection.
/// Combines 5 detection components:
/// 1. Spectral (FFT) - peaks in periodogram
/// 2. ACF peak - first significant peak in autocorrelation
/// 3. ACF average - weighted mean of ACF peaks
/// 4. Zero-crossing - period from ACF zero crossings
/// 5. Spectral differencing - FFT on first-differenced signal
///
/// Each component provides both a period estimate and a confidence score.
/// Only components with sufficient confidence participate in voting.
/// The final period is chosen by majority voting with tolerance.
///
/// # Arguments
/// * `data` - Input signal (1D time series or mean curve from fdata)
/// * `argvals` - Time points corresponding to data
/// * `tolerance` - Relative tolerance for considering periods equal (default: 0.05 = 5%)
///
/// # Returns
/// * `SazedResult` containing the consensus period and component details.
///   The period is NaN when the input is shorter than 8 points or when fewer
///   than 2 components pass their confidence thresholds.
pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
    let n = data.len();
    let tol = tolerance.unwrap_or(0.05); // 5% relative tolerance by default

    // Too few points (or mismatched grid): nothing detectable.
    if n < 8 || argvals.len() != n {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: SazedComponents {
                spectral: f64::NAN,
                acf_peak: f64::NAN,
                acf_average: f64::NAN,
                zero_crossing: f64::NAN,
                spectral_diff: f64::NAN,
            },
            agreeing_components: 0,
        };
    }

    // Average sample spacing (assumes an approximately uniform grid —
    // derived from the endpoints only).
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);
    let signal_range = argvals[n - 1] - argvals[0];

    // Minimum accepted period: signal_range * 3 / n, i.e. roughly three
    // sample spacings — anything shorter is not resolvable on this grid.
    let min_period = signal_range / (n as f64 / 3.0);
    // Maximum accepted period: half the record, so at least two complete
    // cycles must fit in the observed range.
    let max_period = signal_range / 2.0;

    // Component 1: Spectral (FFT) detection with confidence
    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);

    // Component 2: ACF peak detection with confidence
    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);

    // Component 3: ACF weighted average (uses ACF peak confidence)
    let acf_average_period = sazed_acf_average(data, dt, max_lag);

    // Component 4: Zero-crossing analysis with confidence
    let (zero_crossing_period, zero_crossing_conf) =
        sazed_zero_crossing_with_confidence(data, dt, max_lag);

    // Component 5: Spectral on differenced signal with confidence
    let (spectral_diff_period, spectral_diff_conf) =
        sazed_spectral_diff_with_confidence(data, argvals);

    let components = SazedComponents {
        spectral: spectral_period,
        acf_peak: acf_peak_period,
        acf_average: acf_average_period,
        zero_crossing: zero_crossing_period,
        spectral_diff: spectral_diff_period,
    };

    // Confidence thresholds for each component (tuned to minimize FPR on noise)
    // For Gaussian noise: spectral peaks rarely exceed 6x median, ACF ~1/sqrt(n)
    let spectral_thresh = 8.0; // Power ratio must be > 8x median (noise rarely exceeds 6x)
    let acf_thresh = 0.3; // ACF correlation must be > 0.3 (noise ~0.1 for n=100)
    let zero_crossing_thresh = 0.9; // Zero-crossing consistency > 90%
    let spectral_diff_thresh = 6.0; // Diff spectral power ratio > 6x

    // Minimum number of confident components required to report a period
    let min_confident_components = 2;

    // Collect valid periods (only from components with sufficient confidence).
    // Note: the ACF-average component is gated by the ACF-peak confidence,
    // since it has no confidence score of its own.
    let confident_periods: Vec<f64> = [
        validate_sazed_component(
            spectral_period,
            spectral_conf,
            min_period,
            max_period,
            spectral_thresh,
        ),
        validate_sazed_component(
            acf_peak_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            acf_average_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            zero_crossing_period,
            zero_crossing_conf,
            min_period,
            max_period,
            zero_crossing_thresh,
        ),
        validate_sazed_component(
            spectral_diff_period,
            spectral_diff_conf,
            min_period,
            max_period,
            spectral_diff_thresh,
        ),
    ]
    .into_iter()
    .flatten()
    .collect();

    // Require minimum number of confident components before reporting a period
    if confident_periods.len() < min_confident_components {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: components,
            agreeing_components: confident_periods.len(),
        };
    }

    // Ensemble voting: find the mode with tolerance.
    // Confidence is the fraction of the 5 components that agree.
    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
    let confidence = agreeing_count as f64 / 5.0;

    SazedResult {
        period: consensus_period,
        confidence,
        component_periods: components,
        agreeing_components: agreeing_count,
    }
}
2695
2696/// SAZED for functional data (matrix format)
2697///
2698/// Computes mean curve first, then applies SAZED.
2699///
2700/// # Arguments
2701/// * `data` - Column-major matrix (n x m)
2702/// * `n` - Number of samples
2703/// * `m` - Number of evaluation points
2704/// * `argvals` - Evaluation points
2705/// * `tolerance` - Relative tolerance for period matching
2706pub fn sazed_fdata(
2707    data: &[f64],
2708    n: usize,
2709    m: usize,
2710    argvals: &[f64],
2711    tolerance: Option<f64>,
2712) -> SazedResult {
2713    if n == 0 || m < 8 || argvals.len() != m {
2714        return SazedResult {
2715            period: f64::NAN,
2716            confidence: 0.0,
2717            component_periods: SazedComponents {
2718                spectral: f64::NAN,
2719                acf_peak: f64::NAN,
2720                acf_average: f64::NAN,
2721                zero_crossing: f64::NAN,
2722                spectral_diff: f64::NAN,
2723            },
2724            agreeing_components: 0,
2725        };
2726    }
2727
2728    let mean_curve = compute_mean_curve(data, n, m);
2729    sazed(&mean_curve, argvals, tolerance)
2730}
2731
2732/// Spectral component with confidence: returns (period, power_ratio)
2733fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2734    let (frequencies, power) = periodogram(data, argvals);
2735
2736    if frequencies.len() < 3 {
2737        return (f64::NAN, 0.0);
2738    }
2739
2740    // Find peaks in power spectrum (skip DC)
2741    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2742
2743    if power_no_dc.is_empty() {
2744        return (f64::NAN, 0.0);
2745    }
2746
2747    // Calculate noise floor as median
2748    let mut sorted_power = power_no_dc.clone();
2749    sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2750    let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2751
2752    // Find global maximum
2753    let mut max_idx = 0;
2754    let mut max_val = 0.0;
2755    for (i, &p) in power_no_dc.iter().enumerate() {
2756        if p > max_val {
2757            max_val = p;
2758            max_idx = i;
2759        }
2760    }
2761
2762    let power_ratio = max_val / noise_floor;
2763    let freq = frequencies[max_idx + 1];
2764
2765    if freq > 1e-15 {
2766        (1.0 / freq, power_ratio)
2767    } else {
2768        (f64::NAN, 0.0)
2769    }
2770}
2771
2772/// ACF peak component with confidence: returns (period, acf_value_at_peak)
2773fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2774    let acf = autocorrelation(data, max_lag);
2775
2776    match find_first_acf_peak(&acf) {
2777        Some((peak_lag, acf_value)) => (peak_lag as f64 * dt, acf_value),
2778        None => (f64::NAN, 0.0),
2779    }
2780}
2781
2782/// ACF average component: weighted mean of ACF peak locations
2783fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
2784    let acf = autocorrelation(data, max_lag);
2785
2786    if acf.len() < 4 {
2787        return f64::NAN;
2788    }
2789
2790    // Find all peaks in ACF
2791    let peaks = find_peaks_1d(&acf[1..], 1);
2792
2793    if peaks.is_empty() {
2794        return f64::NAN;
2795    }
2796
2797    // Weight peaks by their ACF value
2798    let mut weighted_sum = 0.0;
2799    let mut weight_sum = 0.0;
2800
2801    for (i, &peak_idx) in peaks.iter().enumerate() {
2802        let lag = peak_idx + 1;
2803        let weight = acf[lag].max(0.0);
2804
2805        if i == 0 {
2806            // First peak is the fundamental period
2807            weighted_sum += lag as f64 * weight;
2808            weight_sum += weight;
2809        } else {
2810            // Later peaks: estimate fundamental by dividing by harmonic number
2811            let expected_fundamental = peaks[0] + 1;
2812            let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
2813            if harmonic > 0 {
2814                let fundamental_est = lag as f64 / harmonic as f64;
2815                weighted_sum += fundamental_est * weight;
2816                weight_sum += weight;
2817            }
2818        }
2819    }
2820
2821    if weight_sum > 1e-15 {
2822        weighted_sum / weight_sum * dt
2823    } else {
2824        f64::NAN
2825    }
2826}
2827
2828/// Zero-crossing component with confidence: returns (period, consistency)
2829/// Consistency is how regular the zero crossings are (std/mean of half-periods)
2830fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2831    let acf = autocorrelation(data, max_lag);
2832
2833    if acf.len() < 4 {
2834        return (f64::NAN, 0.0);
2835    }
2836
2837    // Find zero crossings (sign changes)
2838    let mut crossings = Vec::new();
2839    for i in 1..acf.len() {
2840        if acf[i - 1] * acf[i] < 0.0 {
2841            // Linear interpolation for more precise crossing
2842            let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
2843            crossings.push((i - 1) as f64 + frac);
2844        }
2845    }
2846
2847    if crossings.len() < 2 {
2848        return (f64::NAN, 0.0);
2849    }
2850
2851    // Period is twice the distance between consecutive zero crossings
2852    // (ACF goes through two zero crossings per period)
2853    let mut half_periods = Vec::new();
2854    for i in 1..crossings.len() {
2855        half_periods.push(crossings[i] - crossings[i - 1]);
2856    }
2857
2858    if half_periods.is_empty() {
2859        return (f64::NAN, 0.0);
2860    }
2861
2862    // Calculate consistency: 1 - (std/mean) of half-periods
2863    // High consistency means regular zero crossings
2864    let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
2865    let variance: f64 =
2866        half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
2867    let std = variance.sqrt();
2868    let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);
2869
2870    // Median half-period
2871    half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2872    let median_half = half_periods[half_periods.len() / 2];
2873
2874    (2.0 * median_half * dt, consistency)
2875}
2876
2877/// Spectral differencing with confidence: returns (period, power_ratio)
2878fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2879    if data.len() < 4 {
2880        return (f64::NAN, 0.0);
2881    }
2882
2883    // First difference to remove trend
2884    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2885    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2886
2887    sazed_spectral_with_confidence(&diff, &diff_argvals)
2888}
2889
/// Find peaks in power spectrum above noise floor.
///
/// A bin counts as a peak when it strictly exceeds both neighbors and is more
/// than twice the median power. Returned indices are ordered by descending
/// power (strongest peak first); endpoints are never peaks.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    if power.len() < 3 {
        return Vec::new();
    }

    // Noise floor: median power. Peaks must clear twice it.
    let mut ranked: Vec<f64> = power.to_vec();
    ranked.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let threshold = ranked[ranked.len() / 2] * 2.0;

    // Interior local maxima above the threshold.
    let mut peaks: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();

    // Strongest first.
    peaks.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    peaks.into_iter().map(|(idx, _)| idx).collect()
}
2915
2916/// Find consensus period from multiple estimates using tolerance-based voting
2917fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
2918    if periods.is_empty() {
2919        return (f64::NAN, 0);
2920    }
2921    if periods.len() == 1 {
2922        return (periods[0], 1);
2923    }
2924
2925    let mut best_period = periods[0];
2926    let mut best_count = 0;
2927    let mut best_sum = 0.0;
2928
2929    for &p1 in periods {
2930        let (count, sum) = count_agreeing_periods(periods, p1, tolerance);
2931
2932        if count > best_count
2933            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64)
2934        {
2935            best_count = count;
2936            best_period = sum / count as f64;
2937            best_sum = sum;
2938        }
2939    }
2940
2941    (best_period, best_count)
2942}
2943
/// Result of Autoperiod detection.
///
/// Produced by [`autoperiod`] / [`autoperiod_fdata`]. On failure, `period`
/// is NaN, all scores are 0.0, and `candidates` is empty.
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Detected period
    pub period: f64,
    /// Combined confidence (FFT * ACF validation)
    pub confidence: f64,
    /// FFT power at the detected period
    pub fft_power: f64,
    /// ACF validation score (0-1)
    pub acf_validation: f64,
    /// All candidate periods considered
    pub candidates: Vec<AutoperiodCandidate>,
}
2958
/// A candidate period from Autoperiod detection.
///
/// Candidates come from periodogram peaks refined against the ACF;
/// `combined_score` is the normalized FFT power times the ACF score.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Candidate period
    pub period: f64,
    /// FFT power
    pub fft_power: f64,
    /// ACF validation score
    pub acf_score: f64,
    /// Combined score (power * validation)
    pub combined_score: f64,
}
2971
2972fn empty_autoperiod_result() -> AutoperiodResult {
2973    AutoperiodResult {
2974        period: f64::NAN,
2975        confidence: 0.0,
2976        fft_power: 0.0,
2977        acf_validation: 0.0,
2978        candidates: Vec::new(),
2979    }
2980}
2981
2982/// Build an autoperiod candidate from a spectral peak, refining with gradient ascent on ACF.
2983fn build_autoperiod_candidate(
2984    peak_idx: usize,
2985    frequencies: &[f64],
2986    power_no_dc: &[f64],
2987    acf: &[f64],
2988    dt: f64,
2989    steps: usize,
2990    total_power: f64,
2991) -> Option<AutoperiodCandidate> {
2992    let freq = frequencies[peak_idx + 1];
2993    if freq < 1e-15 {
2994        return None;
2995    }
2996    let fft_power = power_no_dc[peak_idx];
2997    let normalized_power = fft_power / total_power.max(1e-15);
2998    let refined_period = refine_period_gradient(acf, 1.0 / freq, dt, steps);
2999    let refined_acf_score = validate_period_acf(acf, refined_period, dt);
3000    Some(AutoperiodCandidate {
3001        period: refined_period,
3002        fft_power,
3003        acf_score: refined_acf_score,
3004        combined_score: normalized_power * refined_acf_score,
3005    })
3006}
3007
3008/// Autoperiod: Hybrid FFT + ACF Period Detection
3009///
3010/// Implements the Autoperiod algorithm (Vlachos et al. 2005) which:
3011/// 1. Computes the periodogram via FFT to find candidate periods
3012/// 2. Validates each candidate using the autocorrelation function
3013/// 3. Applies gradient ascent to refine the period estimate
3014/// 4. Returns the period with the highest combined confidence
3015///
3016/// This method is more robust than pure FFT because ACF validation
3017/// filters out spurious spectral peaks that don't correspond to
3018/// true periodicity.
3019///
3020/// # Arguments
3021/// * `data` - Input signal (1D time series)
3022/// * `argvals` - Time points corresponding to data
3023/// * `n_candidates` - Maximum number of FFT peaks to consider (default: 5)
3024/// * `gradient_steps` - Number of gradient ascent refinement steps (default: 10)
3025///
3026/// # Returns
3027/// * `AutoperiodResult` containing the best period and validation details
3028pub fn autoperiod(
3029    data: &[f64],
3030    argvals: &[f64],
3031    n_candidates: Option<usize>,
3032    gradient_steps: Option<usize>,
3033) -> AutoperiodResult {
3034    let n = data.len();
3035    let max_candidates = n_candidates.unwrap_or(5);
3036    let steps = gradient_steps.unwrap_or(10);
3037
3038    if n < 8 || argvals.len() != n {
3039        return empty_autoperiod_result();
3040    }
3041
3042    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3043    let max_lag = (n / 2).max(4);
3044
3045    // Step 1: Compute periodogram and find candidate periods
3046    let (frequencies, power) = periodogram(data, argvals);
3047
3048    if frequencies.len() < 3 {
3049        return empty_autoperiod_result();
3050    }
3051
3052    // Find top spectral peaks
3053    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3054    let peak_indices = find_spectral_peaks(&power_no_dc);
3055
3056    if peak_indices.is_empty() {
3057        return empty_autoperiod_result();
3058    }
3059
3060    // Step 2: Compute ACF for validation
3061    let acf = autocorrelation(data, max_lag);
3062
3063    // Step 3: Validate each candidate and refine with gradient ascent
3064    let total_power: f64 = power_no_dc.iter().sum();
3065    let candidates: Vec<AutoperiodCandidate> = peak_indices
3066        .iter()
3067        .take(max_candidates)
3068        .filter_map(|&peak_idx| {
3069            build_autoperiod_candidate(
3070                peak_idx,
3071                &frequencies,
3072                &power_no_dc,
3073                &acf,
3074                dt,
3075                steps,
3076                total_power,
3077            )
3078        })
3079        .collect();
3080
3081    if candidates.is_empty() {
3082        return empty_autoperiod_result();
3083    }
3084
3085    // Select best candidate based on combined score
3086    let best = candidates
3087        .iter()
3088        .max_by(|a, b| {
3089            a.combined_score
3090                .partial_cmp(&b.combined_score)
3091                .unwrap_or(std::cmp::Ordering::Equal)
3092        })
3093        .unwrap();
3094
3095    AutoperiodResult {
3096        period: best.period,
3097        confidence: best.combined_score,
3098        fft_power: best.fft_power,
3099        acf_validation: best.acf_score,
3100        candidates,
3101    }
3102}
3103
3104/// Autoperiod for functional data (matrix format)
3105pub fn autoperiod_fdata(
3106    data: &[f64],
3107    n: usize,
3108    m: usize,
3109    argvals: &[f64],
3110    n_candidates: Option<usize>,
3111    gradient_steps: Option<usize>,
3112) -> AutoperiodResult {
3113    if n == 0 || m < 8 || argvals.len() != m {
3114        return AutoperiodResult {
3115            period: f64::NAN,
3116            confidence: 0.0,
3117            fft_power: 0.0,
3118            acf_validation: 0.0,
3119            candidates: Vec::new(),
3120        };
3121    }
3122
3123    let mean_curve = compute_mean_curve(data, n, m);
3124    autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3125}
3126
/// Validate a candidate period using ACF.
///
/// The score is the (clamped non-negative) ACF value at the candidate's lag.
/// It is halved when the half-period lag is nearly as correlated (suggesting
/// the candidate is really a harmonic of a shorter period) and boosted by 20%
/// when the double-period lag is also clearly periodic. Capped at 1.0.
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    // Candidate lag in samples; out-of-range lags score zero.
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }

    let base = acf[lag];
    let mut score = base.max(0.0);

    // Strong half-period correlation suggests the true period is half
    // the candidate — penalize.
    let half = lag / 2;
    if half > 0 && half < acf.len() && acf[half] > base * 0.7 {
        score *= 0.5;
    }

    // Correlation surviving at twice the lag corroborates the candidate.
    let double = lag * 2;
    if double < acf.len() && acf[double] > 0.3 {
        score *= 1.2;
    }

    score.min(1.0)
}
3165
3166/// Refine period estimate using gradient ascent on ACF
3167fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3168    let mut period = initial_period;
3169    let step_size = dt * 0.5; // Search step size
3170
3171    for _ in 0..steps {
3172        let current_score = validate_period_acf(acf, period, dt);
3173        let left_score = validate_period_acf(acf, period - step_size, dt);
3174        let right_score = validate_period_acf(acf, period + step_size, dt);
3175
3176        if left_score > current_score && left_score > right_score {
3177            period -= step_size;
3178        } else if right_score > current_score {
3179            period += step_size;
3180        }
3181        // If current is best, we've converged
3182    }
3183
3184    period.max(dt) // Ensure period is at least one time step
3185}
3186
/// Result of CFDAutoperiod detection.
///
/// `periods` and `confidences` are sorted by descending combined score
/// (acf * power); the scalar fields mirror their first entries. On failure,
/// `period` is NaN and the vectors are empty.
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Detected period (primary)
    pub period: f64,
    /// Confidence score
    pub confidence: f64,
    /// ACF validation score for the primary period
    pub acf_validation: f64,
    /// All detected periods (cluster centers)
    pub periods: Vec<f64>,
    /// Confidence for each detected period
    pub confidences: Vec<f64>,
}
3201
/// Convert spectral peak indices to candidate (period, normalized_power) pairs.
///
/// Peaks at (near-)zero frequency are skipped; power is normalized by the
/// total non-DC power so candidates are comparable across spectra.
fn generate_cfd_candidates(
    frequencies: &[f64],
    power_no_dc: &[f64],
    peak_indices: &[usize],
) -> Vec<(f64, f64)> {
    let denom = power_no_dc.iter().sum::<f64>().max(1e-15);

    let mut candidates = Vec::with_capacity(peak_indices.len());
    for &idx in peak_indices {
        // `idx` indexes the DC-stripped spectrum; +1 recovers the full bin.
        let freq = frequencies[idx + 1];
        if freq > 1e-15 {
            candidates.push((1.0 / freq, power_no_dc[idx] / denom));
        }
    }
    candidates
}
3223
3224/// Validate clustered period candidates using ACF, returning (period, acf_score, power) triples.
3225fn validate_cfd_candidates(clusters: &[(f64, f64)], acf: &[f64], dt: f64) -> Vec<(f64, f64, f64)> {
3226    clusters
3227        .iter()
3228        .filter_map(|&(center, power_sum)| {
3229            let acf_score = validate_period_acf(acf, center, dt);
3230            if acf_score > 0.1 {
3231                Some((center, acf_score, power_sum))
3232            } else {
3233                None
3234            }
3235        })
3236        .collect()
3237}
3238
3239/// Validate cluster candidates with ACF, falling back to best cluster if none pass.
3240fn validate_or_fallback_cfd(
3241    validated: Vec<(f64, f64, f64)>,
3242    candidates: &[(f64, f64)],
3243    tol: f64,
3244    min_size: usize,
3245) -> Vec<(f64, f64, f64)> {
3246    if !validated.is_empty() {
3247        return validated;
3248    }
3249    // Fallback: pick highest-power cluster without ACF validation
3250    cluster_periods(candidates, tol, min_size)
3251        .into_iter()
3252        .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3253        .map(|(center, power_sum)| vec![(center, 0.0, power_sum)])
3254        .unwrap_or_default()
3255}
3256
/// Rank validated results by combined score (acf * power).
/// Returns (periods, confidences, top_acf_validation).
///
/// Panics if `validated` is empty; callers guarantee at least one entry.
fn rank_cfd_results(validated: &[(f64, f64, f64)]) -> (Vec<f64>, Vec<f64>, f64) {
    let mut ranked = validated.to_vec();
    // Highest acf * power first.
    ranked.sort_by(|a, b| {
        (b.1 * b.2)
            .partial_cmp(&(a.1 * a.2))
            .unwrap_or(std::cmp::Ordering::Equal)
    });

    let periods: Vec<f64> = ranked.iter().map(|&(p, _, _)| p).collect();
    let confidences: Vec<f64> = ranked.iter().map(|&(_, a, w)| a * w).collect();
    (periods, confidences, ranked[0].1)
}
3271
3272fn empty_cfd_result() -> CfdAutoperiodResult {
3273    CfdAutoperiodResult {
3274        period: f64::NAN,
3275        confidence: 0.0,
3276        acf_validation: 0.0,
3277        periods: Vec::new(),
3278        confidences: Vec::new(),
3279    }
3280}
3281
3282/// Extract spectral candidates from differenced data: difference, periodogram, peak-find, generate.
3283fn extract_cfd_spectral_candidates(data: &[f64], argvals: &[f64]) -> Option<Vec<(f64, f64)>> {
3284    let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3285    let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3286    let (frequencies, power) = periodogram(&diff, &diff_argvals);
3287    if frequencies.len() < 3 {
3288        return None;
3289    }
3290    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3291    let peak_indices = find_spectral_peaks(&power_no_dc);
3292    if peak_indices.is_empty() {
3293        return None;
3294    }
3295    let candidates = generate_cfd_candidates(&frequencies, &power_no_dc, &peak_indices);
3296    if candidates.is_empty() {
3297        None
3298    } else {
3299        Some(candidates)
3300    }
3301}
3302
3303/// CFDAutoperiod: Clustered Filtered Detrended Autoperiod
3304///
3305/// Implements the CFDAutoperiod algorithm (Puech et al. 2020) which:
3306/// 1. Applies first-order differencing to remove trends
3307/// 2. Computes FFT on the detrended signal
3308/// 3. Identifies candidate periods from periodogram peaks
3309/// 4. Clusters nearby candidates using density-based clustering
3310/// 5. Validates cluster centers using ACF on the original signal
3311///
3312/// This method is particularly effective for signals with strong trends
3313/// and handles multiple periodicities by detecting clusters of candidate periods.
3314///
3315/// # Arguments
3316/// * `data` - Input signal (1D time series)
3317/// * `argvals` - Time points corresponding to data
3318/// * `cluster_tolerance` - Relative tolerance for clustering periods (default: 0.1 = 10%)
3319/// * `min_cluster_size` - Minimum number of candidates to form a cluster (default: 1)
3320///
3321/// # Returns
3322/// * `CfdAutoperiodResult` containing detected periods and validation scores
3323pub fn cfd_autoperiod(
3324    data: &[f64],
3325    argvals: &[f64],
3326    cluster_tolerance: Option<f64>,
3327    min_cluster_size: Option<usize>,
3328) -> CfdAutoperiodResult {
3329    let n = data.len();
3330    let tol = cluster_tolerance.unwrap_or(0.1);
3331    let min_size = min_cluster_size.unwrap_or(1);
3332
3333    if n < 8 || argvals.len() != n {
3334        return empty_cfd_result();
3335    }
3336
3337    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3338    let max_lag = (n / 2).max(4);
3339
3340    let Some(candidates) = extract_cfd_spectral_candidates(data, argvals) else {
3341        return empty_cfd_result();
3342    };
3343
3344    let clusters = cluster_periods(&candidates, tol, min_size);
3345    if clusters.is_empty() {
3346        return empty_cfd_result();
3347    }
3348
3349    let acf = autocorrelation(data, max_lag);
3350    let validated = validate_cfd_candidates(&clusters, &acf, dt);
3351    let validated = validate_or_fallback_cfd(validated, &candidates, tol, min_size);
3352    let (periods, confidences, top_acf) = rank_cfd_results(&validated);
3353
3354    CfdAutoperiodResult {
3355        period: periods[0],
3356        confidence: confidences[0],
3357        acf_validation: top_acf,
3358        periods,
3359        confidences,
3360    }
3361}
3362
3363/// CFDAutoperiod for functional data (matrix format)
3364pub fn cfd_autoperiod_fdata(
3365    data: &[f64],
3366    n: usize,
3367    m: usize,
3368    argvals: &[f64],
3369    cluster_tolerance: Option<f64>,
3370    min_cluster_size: Option<usize>,
3371) -> CfdAutoperiodResult {
3372    if n == 0 || m < 8 || argvals.len() != m {
3373        return CfdAutoperiodResult {
3374            period: f64::NAN,
3375            confidence: 0.0,
3376            acf_validation: 0.0,
3377            periods: Vec::new(),
3378            confidences: Vec::new(),
3379        };
3380    }
3381
3382    let mean_curve = compute_mean_curve(data, n, m);
3383    cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3384}
3385
/// Cluster periods using a simple density-based approach.
///
/// Candidates are sorted by period and greedily merged: a candidate joins the
/// current cluster when its relative distance to the running (unweighted)
/// cluster mean is within `tolerance`. Clusters smaller than `min_size` are
/// dropped.
///
/// # Arguments
/// * `candidates` - (period, power) pairs
/// * `tolerance` - Relative distance threshold for joining a cluster
/// * `min_size` - Minimum number of members for a cluster to be kept
///
/// # Returns
/// (power-weighted center, total power) for each retained cluster, in
/// ascending period order.
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    if candidates.is_empty() {
        return Vec::new();
    }

    // Finalize a cluster: power-weighted center and total power.
    // (Previously duplicated inline for the running and trailing clusters.)
    fn finalize(cluster: &[(f64, f64)]) -> (f64, f64) {
        let total_power: f64 = cluster.iter().map(|&(_, pw)| pw).sum();
        let center =
            cluster.iter().map(|&(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
        (center, total_power)
    }

    // Sort by period so each cluster is a contiguous run.
    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in sorted.iter().skip(1) {
        // Unweighted mean of the current cluster's periods.
        let mean = current.iter().map(|&(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - mean).abs() / mean.max(period);

        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            // Close the current cluster and start a new one.
            if current.len() >= min_size {
                clusters.push(finalize(&current));
            }
            current = vec![(period, power)];
        }
    }

    // Flush the trailing cluster.
    if current.len() >= min_size {
        clusters.push(finalize(&current));
    }

    clusters
}
3438
3439// ============================================================================
3440// Lomb-Scargle Periodogram
3441// ============================================================================
3442
/// Result of Lomb-Scargle periodogram analysis.
///
/// `frequencies`, `periods`, and `power` are index-aligned; the `peak_*`
/// fields describe the highest-power entry of that grid.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Test frequencies
    pub frequencies: Vec<f64>,
    /// Corresponding periods (1/frequency; infinity for zero frequency)
    pub periods: Vec<f64>,
    /// Normalized Lomb-Scargle power at each frequency
    pub power: Vec<f64>,
    /// Peak period (highest power)
    pub peak_period: f64,
    /// Peak frequency
    pub peak_frequency: f64,
    /// Peak power
    pub peak_power: f64,
    /// False alarm probability at peak (significance level)
    pub false_alarm_probability: f64,
    /// Significance level (1 - FAP)
    pub significance: f64,
}
3463
3464/// Compute Lomb-Scargle periodogram for irregularly sampled data.
3465///
3466/// The Lomb-Scargle periodogram is designed for unevenly-spaced time series
3467/// and reduces to the standard periodogram for evenly-spaced data.
3468///
3469/// # Algorithm
3470/// Following Scargle (1982) and Horne & Baliunas (1986):
3471/// 1. For each test frequency ω, compute the phase shift τ
3472/// 2. Compute the normalized power P(ω)
3473/// 3. Estimate false alarm probability using the exponential distribution
3474///
3475/// # Arguments
3476/// * `times` - Observation times (not necessarily evenly spaced)
3477/// * `values` - Observed values at each time
3478/// * `frequencies` - Optional frequencies to evaluate (cycles per unit time).
3479///   If None, automatically generates a frequency grid.
3480/// * `oversampling` - Oversampling factor for auto-generated frequency grid.
3481///   Default: 4.0. Higher values give finer frequency resolution.
3482/// * `nyquist_factor` - Maximum frequency as multiple of pseudo-Nyquist.
3483///   Default: 1.0.
3484///
3485/// # Returns
3486/// `LombScargleResult` with power spectrum and significance estimates.
3487///
3488/// # Example
3489/// ```rust
3490/// use fdars_core::seasonal::lomb_scargle;
3491/// use std::f64::consts::PI;
3492///
3493/// // Irregularly sampled sine wave
3494/// let times: Vec<f64> = vec![0.0, 0.3, 0.7, 1.2, 1.5, 2.1, 2.8, 3.0, 3.5, 4.0];
3495/// let period = 1.5;
3496/// let values: Vec<f64> = times.iter()
3497///     .map(|&t| (2.0 * PI * t / period).sin())
3498///     .collect();
3499///
3500/// let result = lomb_scargle(&times, &values, None, None, None);
3501/// assert!((result.peak_period - period).abs() < 0.2);
3502/// ```
3503pub fn lomb_scargle(
3504    times: &[f64],
3505    values: &[f64],
3506    frequencies: Option<&[f64]>,
3507    oversampling: Option<f64>,
3508    nyquist_factor: Option<f64>,
3509) -> LombScargleResult {
3510    let n = times.len();
3511    assert_eq!(n, values.len(), "times and values must have same length");
3512    assert!(n >= 3, "Need at least 3 data points");
3513
3514    // Compute mean and variance
3515    let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3516    let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3517
3518    // Generate frequency grid if not provided
3519    let freq_vec: Vec<f64>;
3520    let freqs = if let Some(f) = frequencies {
3521        f
3522    } else {
3523        freq_vec = generate_ls_frequencies(
3524            times,
3525            oversampling.unwrap_or(4.0),
3526            nyquist_factor.unwrap_or(1.0),
3527        );
3528        &freq_vec
3529    };
3530
3531    // Compute Lomb-Scargle power at each frequency
3532    let mut power = Vec::with_capacity(freqs.len());
3533
3534    for &freq in freqs.iter() {
3535        let omega = 2.0 * PI * freq;
3536        let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3537        power.push(p);
3538    }
3539
3540    // Find peak
3541    let (peak_idx, &peak_power) = power
3542        .iter()
3543        .enumerate()
3544        .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
3545        .unwrap_or((0, &0.0));
3546
3547    let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3548    let peak_period = if peak_frequency > 0.0 {
3549        1.0 / peak_frequency
3550    } else {
3551        f64::INFINITY
3552    };
3553
3554    // Compute false alarm probability
3555    let n_indep = estimate_independent_frequencies(times, freqs.len());
3556    let fap = lomb_scargle_fap(peak_power, n_indep, n);
3557
3558    // Compute periods from frequencies
3559    let periods: Vec<f64> = freqs
3560        .iter()
3561        .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3562        .collect();
3563
3564    LombScargleResult {
3565        frequencies: freqs.to_vec(),
3566        periods,
3567        power,
3568        peak_period,
3569        peak_frequency,
3570        peak_power,
3571        false_alarm_probability: fap,
3572        significance: 1.0 - fap,
3573    }
3574}
3575
/// Compute Lomb-Scargle power at a single frequency.
///
/// Uses the Scargle (1982) normalization. Returns 0.0 for a non-positive
/// angular frequency or a zero-variance signal.
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    // Phase shift tau that makes the sine and cosine bases orthogonal at omega.
    let (sin_sum, cos_sum) = times.iter().fold((0.0f64, 0.0f64), |(s, c), &t| {
        let arg = 2.0 * omega * t;
        (s + arg.sin(), c + arg.cos())
    });
    let tau = sin_sum.atan2(cos_sum) / (2.0 * omega);

    // Projections of the mean-centered data onto the shifted cos/sin bases,
    // plus the basis norms needed for normalization.
    let mut sc = 0.0; // sum of y * cos
    let mut ss = 0.0; // sum of y * sin
    let mut css = 0.0; // sum of cos^2
    let mut sss = 0.0; // sum of sin^2
    for (&t, &y) in times.iter().zip(values.iter()) {
        let dy = y - mean_y;
        let arg = omega * (t - tau);
        let (s, c) = arg.sin_cos();
        sc += dy * c;
        ss += dy * s;
        css += c * c;
        sss += s * s;
    }

    // Guard the denominators, then apply the Scargle normalization.
    0.5 * (sc * sc / css.max(1e-15) + ss * ss / sss.max(1e-15)) / var_y
}
3627
/// Generate frequency grid for Lomb-Scargle.
///
/// The grid runs from one cycle per observation span up to `nyquist_factor`
/// times the pseudo-Nyquist frequency (based on the mean sampling rate), with
/// spacing `f_min / oversampling`, capped at 10000 points.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    let n = times.len();
    if n < 2 {
        return vec![0.0];
    }

    // Observation span (guarded against duplicated time stamps).
    let (t_min, t_max) = times
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &t| {
            (lo.min(t), hi.max(t))
        });
    let t_span = (t_max - t_min).max(1e-10);

    // Lowest resolvable frequency: one full cycle across the span.
    let f_min = 1.0 / t_span;

    // Pseudo-Nyquist from the mean sampling rate, scaled by the caller.
    let f_nyquist = 0.5 * (n - 1) as f64 / t_span;
    let f_max = f_nyquist * nyquist_factor;

    // Oversampled frequency resolution.
    let df = f_min / oversampling;

    // Grid size, capped to bound memory use.
    let n_freq = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);
    (0..n_freq).map(|i| f_min + i as f64 * df).collect()
}
3661
/// Estimate the number of independent frequencies for FAP computation.
///
/// For irregularly sampled data a conservative estimate (Horne &
/// Baliunas 1986) is the smaller of the number of observations and the
/// number of grid frequencies.
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    times.len().min(n_freq)
}
3671
/// Compute false alarm probability for a Lomb-Scargle peak.
///
/// Uses the independent-frequency approximation
/// `FAP = 1 - (1 - exp(-z))^M`,
/// where `z` is the peak power and `M` the number of independent
/// frequencies.
///
/// Evaluated in log space via `exp_m1` so both tails are accurate: tiny
/// FAPs keep their leading digits, and a strongly negative log-CDF
/// correctly yields FAP ≈ 1. (The previous `log_cdf < -700` underflow
/// branch returned 0.0 — "very significant" — but a strongly negative
/// log-CDF means `prob_single^M ≈ 0`, i.e. FAP ≈ 1, so weak peaks over
/// many frequencies had their significance inverted.)
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    // Non-positive power or no frequencies: the peak carries no evidence.
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // P(single frequency has power < z) under the exponential null.
    let prob_single = 1.0 - (-power).exp();

    if prob_single >= 1.0 {
        return 0.0; // Very significant
    }
    if prob_single <= 0.0 {
        return 1.0; // Not significant
    }

    // Log of the joint CDF over M independent frequencies:
    // ln(prob_single^M) = M * ln(prob_single), always negative here.
    let log_cdf = n_indep as f64 * prob_single.ln();

    // FAP = 1 - exp(log_cdf) = -expm1(log_cdf), computed without
    // cancellation. As log_cdf -> -inf, exp_m1 -> -1 and FAP -> 1.
    -log_cdf.exp_m1()
}
3707
3708/// Compute Lomb-Scargle periodogram for functional data (multiple curves).
3709///
3710/// Computes the periodogram for each curve and returns the result for the
3711/// mean curve or ensemble statistics.
3712///
3713/// # Arguments
3714/// * `data` - Column-major matrix (n x m) of functional data
3715/// * `n` - Number of samples (rows)
3716/// * `m` - Number of evaluation points (columns)
3717/// * `argvals` - Time points of length m
3718/// * `oversampling` - Oversampling factor. Default: 4.0
3719/// * `nyquist_factor` - Maximum frequency multiplier. Default: 1.0
3720///
3721/// # Returns
3722/// `LombScargleResult` computed from the mean curve.
3723pub fn lomb_scargle_fdata(
3724    data: &[f64],
3725    n: usize,
3726    m: usize,
3727    argvals: &[f64],
3728    oversampling: Option<f64>,
3729    nyquist_factor: Option<f64>,
3730) -> LombScargleResult {
3731    // Compute mean curve
3732    let mean_curve = compute_mean_curve(data, n, m);
3733
3734    // Run Lomb-Scargle on mean curve
3735    lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
3736}
3737
3738// ============================================================================
3739// Matrix Profile (STOMP Algorithm)
3740// ============================================================================
3741
/// Result of Matrix Profile computation.
///
/// `profile[i]` is the z-normalized Euclidean distance from the subsequence
/// starting at position `i` to its nearest neighbor, which starts at
/// `profile_index[i]`.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// The matrix profile (minimum z-normalized distance for each position)
    pub profile: Vec<f64>,
    /// Index of the nearest neighbor for each position
    pub profile_index: Vec<usize>,
    /// Subsequence length used
    pub subsequence_length: usize,
    /// Detected periods from arc analysis (strongest first, at most 5)
    pub detected_periods: Vec<f64>,
    /// Arc counts at each index distance (for period detection)
    pub arc_counts: Vec<usize>,
    /// Most prominent detected period (0.0 when none was found)
    pub primary_period: f64,
    /// Confidence score for primary period (based on arc prominence)
    pub confidence: f64,
}
3760
3761/// Compute Matrix Profile using STOMP algorithm (Scalable Time series Ordered-search Matrix Profile).
3762///
3763/// The Matrix Profile is a data structure that stores the z-normalized Euclidean distance
3764/// between each subsequence of a time series and its nearest neighbor. It enables efficient
3765/// motif discovery and anomaly detection.
3766///
3767/// # Algorithm (STOMP - Zhu et al. 2016)
3768/// 1. Pre-compute sliding mean and standard deviation using cumulative sums
3769/// 2. Use FFT to compute first row of distance matrix
3770/// 3. Update subsequent rows incrementally using the dot product update rule
3771/// 4. Track minimum distance and index at each position
3772///
3773/// # Arguments
3774/// * `values` - Time series values
3775/// * `subsequence_length` - Length of subsequences to compare (window size)
3776/// * `exclusion_zone` - Fraction of subsequence length to exclude around each position
3777///   to prevent trivial self-matches. Default: 0.5
3778///
3779/// # Returns
3780/// `MatrixProfileResult` with profile, indices, and detected periods.
3781///
3782/// # Example
3783/// ```rust
3784/// use fdars_core::seasonal::matrix_profile;
3785/// use std::f64::consts::PI;
3786///
3787/// // Periodic signal
3788/// let period = 20.0;
3789/// let values: Vec<f64> = (0..100)
3790///     .map(|i| (2.0 * PI * i as f64 / period).sin())
3791///     .collect();
3792///
3793/// let result = matrix_profile(&values, Some(15), None);
3794/// assert!((result.primary_period - period).abs() < 5.0);
3795/// ```
3796pub fn matrix_profile(
3797    values: &[f64],
3798    subsequence_length: Option<usize>,
3799    exclusion_zone: Option<f64>,
3800) -> MatrixProfileResult {
3801    let n = values.len();
3802
3803    // Default subsequence length: ~ 1/4 of series length, capped at reasonable range
3804    let m = subsequence_length.unwrap_or_else(|| {
3805        let default_m = n / 4;
3806        default_m.max(4).min(n / 2)
3807    });
3808
3809    assert!(m >= 3, "Subsequence length must be at least 3");
3810    assert!(
3811        m <= n / 2,
3812        "Subsequence length must be at most half the series length"
3813    );
3814
3815    let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3816    let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3817
3818    // Number of subsequences
3819    let profile_len = n - m + 1;
3820
3821    // Compute sliding statistics
3822    let (means, stds) = compute_sliding_stats(values, m);
3823
3824    // Compute the matrix profile using STOMP
3825    let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3826
3827    // Perform arc analysis to detect periods
3828    let (arc_counts, detected_periods, primary_period, confidence) =
3829        analyze_arcs(&profile_index, profile_len, m);
3830
3831    MatrixProfileResult {
3832        profile,
3833        profile_index,
3834        subsequence_length: m,
3835        detected_periods,
3836        arc_counts,
3837        primary_period,
3838        confidence,
3839    }
3840}
3841
/// Sliding-window mean and standard deviation via prefix sums.
///
/// Runs in O(n); the returned vectors have length `n - m + 1`. Standard
/// deviations are floored at 1e-10 so later divisions are safe.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let windows = n - m + 1;
    let w = m as f64;

    // Prefix sums of x and x^2: each window aggregate is two subtractions.
    let mut acc = vec![0.0; n + 1];
    let mut acc_sq = vec![0.0; n + 1];
    for (i, &x) in values.iter().enumerate() {
        acc[i + 1] = acc[i] + x;
        acc_sq[i + 1] = acc_sq[i] + x * x;
    }

    let mut means = Vec::with_capacity(windows);
    let mut stds = Vec::with_capacity(windows);
    for start in 0..windows {
        let mean = (acc[start + m] - acc[start]) / w;
        // E[x^2] - E[x]^2; clamp tiny negatives from rounding before sqrt.
        let var = (acc_sq[start + m] - acc_sq[start]) / w - mean * mean;
        means.push(mean);
        stds.push(var.max(0.0).sqrt().max(1e-10));
    }

    (means, stds)
}
3878
3879/// Core STOMP algorithm implementation.
3880///
3881/// Uses FFT for the first row and incremental updates for subsequent rows.
3882fn stomp_core(
3883    values: &[f64],
3884    m: usize,
3885    means: &[f64],
3886    stds: &[f64],
3887    exclusion_radius: usize,
3888) -> (Vec<f64>, Vec<usize>) {
3889    let n = values.len();
3890    let profile_len = n - m + 1;
3891
3892    // Initialize profile with infinity and index with 0
3893    let mut profile = vec![f64::INFINITY; profile_len];
3894    let mut profile_index = vec![0usize; profile_len];
3895
3896    // Compute first row using direct computation (could use FFT for large n)
3897    // QT[0,j] = sum(T[0:m] * T[j:j+m]) for each j
3898    let mut qt = vec![0.0; profile_len];
3899
3900    // First query subsequence
3901    for j in 0..profile_len {
3902        let mut dot = 0.0;
3903        for k in 0..m {
3904            dot += values[k] * values[j + k];
3905        }
3906        qt[j] = dot;
3907    }
3908
3909    // Process first row
3910    update_profile_row(
3911        0,
3912        &qt,
3913        means,
3914        stds,
3915        m,
3916        exclusion_radius,
3917        &mut profile,
3918        &mut profile_index,
3919    );
3920
3921    // Process subsequent rows using incremental updates
3922    for i in 1..profile_len {
3923        // Update QT using the sliding dot product update
3924        // QT[i,j] = QT[i-1,j-1] - T[i-1]*T[j-1] + T[i+m-1]*T[j+m-1]
3925        let mut qt_new = vec![0.0; profile_len];
3926
3927        // First element needs direct computation
3928        let mut dot = 0.0;
3929        for k in 0..m {
3930            dot += values[i + k] * values[k];
3931        }
3932        qt_new[0] = dot;
3933
3934        // Update rest using incremental formula
3935        for j in 1..profile_len {
3936            qt_new[j] =
3937                qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3938        }
3939
3940        qt = qt_new;
3941
3942        // Update profile with this row
3943        update_profile_row(
3944            i,
3945            &qt,
3946            means,
3947            stds,
3948            m,
3949            exclusion_radius,
3950            &mut profile,
3951            &mut profile_index,
3952        );
3953    }
3954
3955    (profile, profile_index)
3956}
3957
/// Fold the distances implied by dot-product row `i` into the profile.
///
/// For every position `j` outside the exclusion zone, derives the
/// z-normalized distance between subsequences `i` and `j` from their
/// Pearson correlation and updates both `profile[i]` and `profile[j]`
/// (the distance matrix is symmetric) when this pair is a new nearest
/// neighbor.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let w = m as f64;

    for (j, &dot) in qt.iter().enumerate().take(profile.len()) {
        // Positions too close to i are trivial self-matches; ignore them.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        // Pearson correlation of the two z-normalized subsequences.
        let denom = w * stds[i] * stds[j];
        let corr = if denom > 0.0 {
            ((dot - w * means[i] * means[j]) / denom).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        // d = sqrt(2m(1 - r)) is the z-normalized Euclidean distance.
        let dist = (2.0 * w * (1.0 - corr)).max(0.0).sqrt();

        // Refresh both endpoints of the (i, j) pair.
        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
4005
/// Detect candidate periods from the profile index via arc counting.
///
/// Every position is linked ("arced") to its nearest neighbor; a histogram
/// of link lengths peaks at the dominant repetition distance, i.e. the
/// period. Returns `(arc_counts, detected_periods, primary_period,
/// confidence)`.
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    // Histogram of |i - nearest_neighbor(i)| over all positions.
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    // Local maxima with at least 3 arcs, ignoring distances below m/2
    // (sub-window distances are not meaningful periods).
    let min_period = m / 2;
    let mut candidates: Vec<(usize, usize)> = Vec::new();
    for d in min_period..arc_counts.len().saturating_sub(1) {
        let left = arc_counts[d.saturating_sub(1)];
        let right = arc_counts[(d + 1).min(arc_counts.len() - 1)];
        if arc_counts[d] > left && arc_counts[d] > right && arc_counts[d] >= 3 {
            candidates.push((d, arc_counts[d]));
        }
    }

    // Strongest peaks first; keep the top five as detected periods.
    candidates.sort_by(|a, b| b.1.cmp(&a.1));
    let detected_periods: Vec<f64> = candidates.iter().take(5).map(|&(d, _)| d as f64).collect();

    // Confidence: share of all long-range arcs captured by the best peak.
    let (primary_period, confidence) = match candidates.first() {
        Some(&(d, count)) => {
            let total: usize = arc_counts[min_period..].iter().sum();
            let conf = if total > 0 {
                (count as f64 / total as f64).min(1.0)
            } else {
                0.0
            };
            (d as f64, conf)
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
4063
4064/// Compute Matrix Profile for functional data (multiple curves).
4065///
4066/// Computes the matrix profile for each curve and returns aggregated results.
4067///
4068/// # Arguments
4069/// * `data` - Column-major matrix (n x m) of functional data
4070/// * `n` - Number of samples (rows)
4071/// * `m` - Number of evaluation points (columns)
4072/// * `subsequence_length` - Length of subsequences. If None, automatically determined.
4073/// * `exclusion_zone` - Exclusion zone fraction. Default: 0.5
4074///
4075/// # Returns
4076/// `MatrixProfileResult` computed from the mean curve.
4077pub fn matrix_profile_fdata(
4078    data: &[f64],
4079    n: usize,
4080    m: usize,
4081    subsequence_length: Option<usize>,
4082    exclusion_zone: Option<f64>,
4083) -> MatrixProfileResult {
4084    // Compute mean curve
4085    let mean_curve = compute_mean_curve(data, n, m);
4086
4087    // Run matrix profile on mean curve
4088    matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
4089}
4090
4091/// Detect seasonality using Matrix Profile analysis.
4092///
4093/// Returns true if significant periodicity is detected based on matrix profile analysis.
4094///
4095/// # Arguments
4096/// * `values` - Time series values
4097/// * `subsequence_length` - Length of subsequences to compare
4098/// * `confidence_threshold` - Minimum confidence for positive detection. Default: 0.1
4099///
4100/// # Returns
4101/// Tuple of (is_seasonal, detected_period, confidence)
4102pub fn matrix_profile_seasonality(
4103    values: &[f64],
4104    subsequence_length: Option<usize>,
4105    confidence_threshold: Option<f64>,
4106) -> (bool, f64, f64) {
4107    let result = matrix_profile(values, subsequence_length, None);
4108
4109    let threshold = confidence_threshold.unwrap_or(0.1);
4110    let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
4111
4112    (is_seasonal, result.primary_period, result.confidence)
4113}
4114
4115// ============================================================================
4116// Singular Spectrum Analysis (SSA)
4117// ============================================================================
4118
/// Result of Singular Spectrum Analysis.
///
/// The decomposition is additive: `trend + seasonal + noise` reconstructs
/// the input series exactly (noise is computed as the residual).
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component
    pub trend: Vec<f64>,
    /// Reconstructed seasonal/periodic component
    pub seasonal: Vec<f64>,
    /// Noise/residual component (input minus trend minus seasonal)
    pub noise: Vec<f64>,
    /// Singular values from SVD (sorted descending), truncated to `n_components`
    pub singular_values: Vec<f64>,
    /// Contribution of each component (proportion of variance)
    pub contributions: Vec<f64>,
    /// Window length used for embedding
    pub window_length: usize,
    /// Number of components extracted
    pub n_components: usize,
    /// Detected period (0.0 when no significant periodicity was found)
    pub detected_period: f64,
    /// Confidence score for detected period
    pub confidence: f64,
}
4141
4142/// Singular Spectrum Analysis (SSA) for time series decomposition.
4143///
4144/// SSA is a model-free, non-parametric method for decomposing a time series
4145/// into trend, oscillatory (seasonal), and noise components using singular
4146/// value decomposition of the trajectory matrix.
4147///
4148/// # Algorithm
4149/// 1. **Embedding**: Convert series into trajectory matrix using sliding windows
4150/// 2. **Decomposition**: SVD of trajectory matrix
4151/// 3. **Grouping**: Identify trend vs. periodic vs. noise components
4152/// 4. **Reconstruction**: Diagonal averaging to recover time series
4153///
4154/// # Arguments
4155/// * `values` - Time series values
4156/// * `window_length` - Embedding window length (L). If None, uses L = min(n/2, 50).
4157///   Larger values capture longer-term patterns but need longer series.
4158/// * `n_components` - Number of components to extract. If None, uses 10.
4159/// * `trend_components` - Indices of components for trend (0-based). If None, auto-detect.
4160/// * `seasonal_components` - Indices of components for seasonal. If None, auto-detect.
4161///
4162/// # Returns
4163/// `SsaResult` with decomposed components and diagnostics.
4164///
4165/// # Example
4166/// ```rust
4167/// use fdars_core::seasonal::ssa;
4168/// use std::f64::consts::PI;
4169///
4170/// // Signal with trend + seasonal + noise
4171/// let n = 100;
4172/// let values: Vec<f64> = (0..n)
4173///     .map(|i| {
4174///         let t = i as f64;
4175///         0.01 * t + (2.0 * PI * t / 12.0).sin() + 0.1 * (i as f64 * 0.1).sin()
4176///     })
4177///     .collect();
4178///
4179/// let result = ssa(&values, None, None, None, None);
4180/// assert!(result.detected_period > 0.0);
4181/// ```
pub fn ssa(
    values: &[f64],
    window_length: Option<usize>,
    n_components: Option<usize>,
    trend_components: Option<&[usize]>,
    seasonal_components: Option<&[usize]>,
) -> SsaResult {
    let n = values.len();

    // Default window length: min(n/2, 50)
    let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));

    // Degenerate series or invalid window: return the input as pure trend
    // with empty diagnostics rather than attempting an SVD.
    if n < 4 || l < 2 || l > n / 2 {
        return SsaResult {
            trend: values.to_vec(),
            seasonal: vec![0.0; n],
            noise: vec![0.0; n],
            singular_values: vec![],
            contributions: vec![],
            window_length: l,
            n_components: 0,
            detected_period: 0.0,
            confidence: 0.0,
        };
    }

    // Number of columns in trajectory matrix
    let k = n - l + 1;

    // Step 1: Embedding - create trajectory matrix (L x K)
    let trajectory = embed_trajectory(values, l, k);

    // Step 2: SVD decomposition
    let (u, sigma, vt) = svd_decompose(&trajectory, l, k);

    // Determine number of components to use (at most the number of
    // singular values returned by the SVD).
    let max_components = sigma.len();
    let n_comp = n_components.unwrap_or(10).min(max_components);

    // Compute contributions (proportion of total variance; squared
    // singular values are the component energies)
    let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
    let contributions: Vec<f64> = sigma
        .iter()
        .take(n_comp)
        .map(|&s| s * s / total_var.max(1e-15))
        .collect();

    // Step 3: Grouping - identify trend and seasonal components.
    // When the caller supplies either grouping explicitly, both are taken
    // as given (missing one defaults to empty) and no period is detected.
    let (trend_idx, seasonal_idx, detected_period, confidence) =
        if trend_components.is_some() || seasonal_components.is_some() {
            // Use provided groupings
            let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
            let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
            (t_idx, s_idx, 0.0, 0.0)
        } else {
            // Auto-detect groupings
            auto_group_ssa_components(&u, &sigma, l, k, n_comp)
        };

    // Step 4: Reconstruction via diagonal averaging
    let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
    let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);

    // Noise is the remainder, so trend + seasonal + noise == values exactly
    let noise: Vec<f64> = values
        .iter()
        .zip(trend.iter())
        .zip(seasonal.iter())
        .map(|((&y, &t), &s)| y - t - s)
        .collect();

    SsaResult {
        trend,
        seasonal,
        noise,
        singular_values: sigma.into_iter().take(n_comp).collect(),
        contributions,
        window_length: l,
        n_components: n_comp,
        detected_period,
        confidence,
    }
}
4265
/// Build the L x K trajectory (Hankel) matrix, stored column-major.
///
/// Column `j` is the sliding window `values[j..j + l]`.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    let mut out = Vec::with_capacity(l * k);
    for j in 0..k {
        out.extend_from_slice(&values[j..j + l]);
    }
    out
}
4279
/// SVD decomposition of trajectory matrix using nalgebra.
///
/// Returns `(u, sigma, vt)` flattened in nalgebra's column-major iteration
/// order: `u` is L x r, `vt` is r x K, and `sigma` holds the r = min(L, K)
/// singular values.
fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
    use nalgebra::{DMatrix, SVD};

    // Create nalgebra matrix (column-major)
    let mat = DMatrix::from_column_slice(l, k, trajectory);

    // Compute SVD
    let svd = SVD::new(mat, true, true);

    // Extract components; `u`/`v_t` are `Some` because both were requested
    // in `SVD::new(_, true, true)` above.
    let u_mat = svd.u.unwrap();
    let vt_mat = svd.v_t.unwrap();
    let sigma = svd.singular_values;

    // Convert to flat vectors (column-major element order)
    let u: Vec<f64> = u_mat.iter().cloned().collect();
    let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
    let vt: Vec<f64> = vt_mat.iter().cloned().collect();

    (u, sigma_vec, vt)
}
4302
/// Classification of a single SSA eigentriple.
enum SsaComponentKind {
    /// Slowly varying component (few sign changes in its singular vector).
    Trend,
    /// Oscillatory component; payload is the detected period in samples.
    Seasonal(f64),
    /// Neither trend-like nor periodic.
    Noise,
}
4308
4309/// Classify an SSA component as trend, seasonal, or noise.
4310fn classify_ssa_component(u_col: &[f64], trend_count: usize) -> SsaComponentKind {
4311    if is_trend_component(u_col) && trend_count < 2 {
4312        SsaComponentKind::Trend
4313    } else {
4314        let (is_periodic, period) = is_periodic_component(u_col);
4315        if is_periodic {
4316            SsaComponentKind::Seasonal(period)
4317        } else {
4318            SsaComponentKind::Noise
4319        }
4320    }
4321}
4322
/// Apply fallback groupings when auto-detection found no components.
///
/// If no trend component was identified, component 0 (the largest singular
/// value) is used as trend. If no seasonal components were identified and
/// at least three components exist, components 1 and 2 are used as the
/// seasonal pair.
///
/// Note: the previous nested `if n_comp > 2` check was redundant — it is
/// implied by the `n_comp >= 3` guard — so both seasonal defaults are
/// always pushed together.
fn apply_ssa_grouping_defaults(
    trend_idx: &mut Vec<usize>,
    seasonal_idx: &mut Vec<usize>,
    n_comp: usize,
) {
    if trend_idx.is_empty() && n_comp > 0 {
        trend_idx.push(0);
    }
    if seasonal_idx.is_empty() && n_comp >= 3 {
        seasonal_idx.push(1);
        seasonal_idx.push(2);
    }
}
4339
4340/// Auto-detect trend and seasonal component groupings.
4341fn auto_group_ssa_components(
4342    u: &[f64],
4343    sigma: &[f64],
4344    l: usize,
4345    _k: usize,
4346    n_comp: usize,
4347) -> (Vec<usize>, Vec<usize>, f64, f64) {
4348    let mut trend_idx = Vec::new();
4349    let mut seasonal_idx = Vec::new();
4350    let mut detected_period = 0.0;
4351    let mut confidence = 0.0;
4352
4353    for i in 0..n_comp.min(sigma.len()) {
4354        let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4355        match classify_ssa_component(&u_col, trend_idx.len()) {
4356            SsaComponentKind::Trend => trend_idx.push(i),
4357            SsaComponentKind::Seasonal(period) => {
4358                seasonal_idx.push(i);
4359                if detected_period == 0.0 && period > 0.0 {
4360                    detected_period = period;
4361                    confidence = sigma[i] / sigma[0].max(1e-15);
4362                }
4363            }
4364            SsaComponentKind::Noise => {}
4365        }
4366    }
4367
4368    apply_ssa_grouping_defaults(&mut trend_idx, &mut seasonal_idx, n_comp);
4369    (trend_idx, seasonal_idx, detected_period, confidence)
4370}
4371
/// Heuristic: a singular vector is trend-like if it rarely crosses zero.
///
/// Vectors shorter than 3 points are never considered trend.
fn is_trend_component(u_col: &[f64]) -> bool {
    if u_col.len() < 3 {
        return false;
    }
    // Count zero crossings via adjacent-pair sign products.
    let crossings = u_col.windows(2).filter(|w| w[0] * w[1] < 0.0).count();
    crossings <= u_col.len() / 10
}
4390
/// Test a singular vector for periodicity via its autocorrelation.
///
/// Returns `(true, lag)` when some lag in `[2, n/2)` has autocorrelation
/// above 0.3; the lag with the highest autocorrelation is reported.
/// Short (< 4 points) or near-constant vectors yield `(false, 0.0)`.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    // Center the vector; a (near-)constant vector cannot be periodic.
    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();
    let energy: f64 = centered.iter().map(|&x| x * x).sum();
    if energy < 1e-15 {
        return (false, 0.0);
    }

    // Scan lags for the strongest autocorrelation above the 0.3 threshold.
    let mut best_lag = 0.0;
    let mut best_acf = 0.0;
    for lag in 2..n / 2 {
        let mut acf = 0.0;
        for (a, b) in centered[..n - lag].iter().zip(&centered[lag..]) {
            acf += a * b;
        }
        acf /= energy;

        if acf > best_acf && acf > 0.3 {
            best_acf = acf;
            best_lag = lag as f64;
        }
    }

    (best_acf > 0.3 && best_lag > 0.0, best_lag)
}
4427
/// Reconstruct time series from grouped components via diagonal averaging.
///
/// Sums the rank-1 matrices `sigma[idx] * u_idx * v_idx^T` for the indices
/// in `group_idx`, then Hankelizes the result back to a length-`n` series.
/// Out-of-range indices are skipped; an empty group yields all zeros.
fn reconstruct_grouped(
    u: &[f64],
    sigma: &[f64],
    vt: &[f64],
    l: usize,
    k: usize,
    n: usize,
    group_idx: &[usize],
) -> Vec<f64> {
    if group_idx.is_empty() {
        return vec![0.0; n];
    }

    // Sum of rank-1 matrices for this group
    let mut grouped_matrix = vec![0.0; l * k];

    for &idx in group_idx {
        if idx >= sigma.len() {
            continue;
        }

        let s = sigma[idx];

        // Add s * u_i * v_i^T
        for j in 0..k {
            for i in 0..l {
                let u_val = u[i + idx * l];
                // v_t is a min(L,K) x K matrix stored column-major, so
                // element (idx, j) sits at idx + j * min(L,K), and
                // sigma.len() equals min(L,K). NOTE(review): `.min(l)` is
                // a no-op in the usual l <= k case — confirm for l > k.
                let v_val = vt[idx + j * sigma.len().min(l)];
                grouped_matrix[i + j * l] += s * u_val * v_val;
            }
        }
    }

    // Diagonal averaging (Hankelization)
    diagonal_average(&grouped_matrix, l, k, n)
}
4465
/// Hankelization: average each anti-diagonal of the L x K matrix to
/// recover a length-`n` series.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut sums = vec![0.0; n];
    let mut hits = vec![0.0; n];

    // Entry (i, j) of the trajectory matrix contributes to position i + j.
    for j in 0..k {
        for i in 0..l {
            let pos = i + j;
            if pos < n {
                sums[pos] += matrix[i + j * l];
                hits[pos] += 1.0;
            }
        }
    }

    // Divide by the contributor count; untouched positions stay 0.
    sums.iter()
        .zip(hits.iter())
        .map(|(&s, &c)| if c > 0.0 { s / c } else { s })
        .collect()
}
4491
4492/// Compute SSA for functional data (multiple curves).
4493///
4494/// # Arguments
4495/// * `data` - Column-major matrix (n x m) of functional data
4496/// * `n` - Number of samples (rows)
4497/// * `m` - Number of evaluation points (columns)
4498/// * `window_length` - SSA window length. If None, auto-determined.
4499/// * `n_components` - Number of SSA components. Default: 10.
4500///
4501/// # Returns
4502/// `SsaResult` computed from the mean curve.
4503pub fn ssa_fdata(
4504    data: &[f64],
4505    n: usize,
4506    m: usize,
4507    window_length: Option<usize>,
4508    n_components: Option<usize>,
4509) -> SsaResult {
4510    // Compute mean curve
4511    let mean_curve = compute_mean_curve(data, n, m);
4512
4513    // Run SSA on mean curve
4514    ssa(&mean_curve, window_length, n_components, None, None)
4515}
4516
4517/// Detect seasonality using SSA.
4518///
4519/// # Arguments
4520/// * `values` - Time series values
4521/// * `window_length` - SSA window length
4522/// * `confidence_threshold` - Minimum confidence for positive detection
4523///
4524/// # Returns
4525/// Tuple of (is_seasonal, detected_period, confidence)
4526pub fn ssa_seasonality(
4527    values: &[f64],
4528    window_length: Option<usize>,
4529    confidence_threshold: Option<f64>,
4530) -> (bool, f64, f64) {
4531    let result = ssa(values, window_length, None, None, None);
4532
4533    let threshold = confidence_threshold.unwrap_or(0.1);
4534    let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4535
4536    (is_seasonal, result.detected_period, result.confidence)
4537}
4538
4539#[cfg(test)]
4540mod tests {
4541    use super::*;
4542    use std::f64::consts::PI;
4543
4544    fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> Vec<f64> {
4545        let mut data = vec![0.0; n * m];
4546        for i in 0..n {
4547            for j in 0..m {
4548                data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4549            }
4550        }
4551        data
4552    }
4553
4554    #[test]
4555    fn test_period_estimation_fft() {
4556        let m = 200;
4557        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4558        let period = 2.0;
4559        let data = generate_sine(1, m, period, &argvals);
4560
4561        let estimate = estimate_period_fft(&data, 1, m, &argvals);
4562        assert!((estimate.period - period).abs() < 0.2);
4563        assert!(estimate.confidence > 1.0);
4564    }
4565
4566    #[test]
4567    fn test_peak_detection() {
4568        let m = 100;
4569        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4570        let period = 2.0;
4571        let data = generate_sine(1, m, period, &argvals);
4572
4573        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
4574
4575        // Should find approximately 5 peaks (10 / 2)
4576        assert!(!result.peaks[0].is_empty());
4577        assert!((result.mean_period - period).abs() < 0.3);
4578    }
4579
    #[test]
    fn test_peak_detection_known_sine() {
        // Pure sine wave: sin(2*pi*t/2) on [0, 10]
        // Peaks occur at t = period/4 + k*period = 0.5, 2.5, 4.5, 6.5, 8.5
        let m = 200; // High resolution for accurate detection
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        // Single curve (n = 1), default height/distance options.
        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        // Should find exactly 5 peaks
        assert_eq!(
            result.peaks[0].len(),
            5,
            "Expected 5 peaks, got {}. Peak times: {:?}",
            result.peaks[0].len(),
            result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
        );

        // Check peak locations are close to expected (grid spacing is about
        // 0.05, so 0.15 allows a few samples of quantization error)
        let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
        for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
            assert!(
                (peak.time - expected).abs() < 0.15,
                "Peak at {:.3} not close to expected {:.3}",
                peak.time,
                expected
            );
        }

        // Check mean period
        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4622
4623    #[test]
4624    fn test_peak_detection_with_min_distance() {
4625        // Same sine wave but with min_distance constraint
4626        let m = 200;
4627        let period = 2.0;
4628        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4629        let data: Vec<f64> = argvals
4630            .iter()
4631            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4632            .collect();
4633
4634        // min_distance = 1.5 should still find all 5 peaks (spacing = 2.0)
4635        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
4636        assert_eq!(
4637            result.peaks[0].len(),
4638            5,
4639            "With min_distance=1.5, expected 5 peaks, got {}",
4640            result.peaks[0].len()
4641        );
4642
4643        // min_distance = 2.5 should find fewer peaks
4644        let result2 = detect_peaks(&data, 1, m, &argvals, Some(2.5), None, false, None);
4645        assert!(
4646            result2.peaks[0].len() < 5,
4647            "With min_distance=2.5, expected fewer than 5 peaks, got {}",
4648            result2.peaks[0].len()
4649        );
4650    }
4651
4652    #[test]
4653    fn test_peak_detection_period_1() {
4654        // Higher frequency: sin(2*pi*t/1) on [0, 10]
4655        // Peaks at t = 0.25, 1.25, 2.25, ..., 9.25 (10 peaks)
4656        let m = 400; // Higher resolution for higher frequency
4657        let period = 1.0;
4658        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4659        let data: Vec<f64> = argvals
4660            .iter()
4661            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
4662            .collect();
4663
4664        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4665
4666        // Should find 10 peaks
4667        assert_eq!(
4668            result.peaks[0].len(),
4669            10,
4670            "Expected 10 peaks, got {}",
4671            result.peaks[0].len()
4672        );
4673
4674        // Check mean period
4675        assert!(
4676            (result.mean_period - period).abs() < 0.1,
4677            "Mean period {:.3} not close to expected {:.3}",
4678            result.mean_period,
4679            period
4680        );
4681    }
4682
4683    #[test]
4684    fn test_peak_detection_shifted_sine() {
4685        // Shifted sine: sin(2*pi*t/2) + 1 on [0, 10]
4686        // Same peak locations, just shifted up
4687        let m = 200;
4688        let period = 2.0;
4689        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4690        let data: Vec<f64> = argvals
4691            .iter()
4692            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
4693            .collect();
4694
4695        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4696
4697        // Should still find 5 peaks
4698        assert_eq!(
4699            result.peaks[0].len(),
4700            5,
4701            "Expected 5 peaks for shifted sine, got {}",
4702            result.peaks[0].len()
4703        );
4704
4705        // Peak values should be around 2.0 (max of sin + 1)
4706        for peak in &result.peaks[0] {
4707            assert!(
4708                (peak.value - 2.0).abs() < 0.05,
4709                "Peak value {:.3} not close to expected 2.0",
4710                peak.value
4711            );
4712        }
4713    }
4714
4715    #[test]
4716    fn test_peak_detection_prominence() {
4717        // Create signal with peaks of different heights
4718        // Large peaks at odd positions, small peaks at even positions
4719        let m = 200;
4720        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4721        let data: Vec<f64> = argvals
4722            .iter()
4723            .map(|&t| {
4724                let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
4725                // Add small ripples
4726                let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
4727                base + ripple
4728            })
4729            .collect();
4730
4731        // Without prominence filter, may find extra peaks from ripples
4732        let result_no_filter = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4733
4734        // With prominence filter, should only find major peaks
4735        let result_filtered = detect_peaks(&data, 1, m, &argvals, None, Some(0.5), false, None);
4736
4737        // Filtered should have fewer or equal peaks
4738        assert!(
4739            result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
4740            "Prominence filter should reduce peak count"
4741        );
4742    }
4743
4744    #[test]
4745    fn test_peak_detection_different_amplitudes() {
4746        // Test with various amplitudes: 0.5, 1.0, 2.0, 5.0
4747        let m = 200;
4748        let period = 2.0;
4749        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4750
4751        for amplitude in [0.5, 1.0, 2.0, 5.0] {
4752            let data: Vec<f64> = argvals
4753                .iter()
4754                .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
4755                .collect();
4756
4757            let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4758
4759            assert_eq!(
4760                result.peaks[0].len(),
4761                5,
4762                "Amplitude {} should still find 5 peaks",
4763                amplitude
4764            );
4765
4766            // Peak values should be close to amplitude
4767            for peak in &result.peaks[0] {
4768                assert!(
4769                    (peak.value - amplitude).abs() < 0.1,
4770                    "Peak value {:.3} should be close to amplitude {}",
4771                    peak.value,
4772                    amplitude
4773                );
4774            }
4775        }
4776    }
4777
4778    #[test]
4779    fn test_peak_detection_varying_frequency() {
4780        // Signal with varying frequency: chirp-like signal
4781        // Peaks get closer together over time
4782        let m = 400;
4783        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4784
4785        // Frequency increases linearly: f(t) = 0.5 + 0.1*t
4786        // Phase integral: phi(t) = 2*pi * (0.5*t + 0.05*t^2)
4787        let data: Vec<f64> = argvals
4788            .iter()
4789            .map(|&t| {
4790                let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4791                phase.sin()
4792            })
4793            .collect();
4794
4795        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4796
4797        // Should find multiple peaks with decreasing spacing
4798        assert!(
4799            result.peaks[0].len() >= 5,
4800            "Should find at least 5 peaks, got {}",
4801            result.peaks[0].len()
4802        );
4803
4804        // Verify inter-peak distances decrease over time
4805        let distances = &result.inter_peak_distances[0];
4806        if distances.len() >= 3 {
4807            // Later peaks should be closer than earlier peaks
4808            let early_avg = (distances[0] + distances[1]) / 2.0;
4809            let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4810            assert!(
4811                late_avg < early_avg,
4812                "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4813                early_avg,
4814                late_avg
4815            );
4816        }
4817    }
4818
4819    #[test]
4820    fn test_peak_detection_sum_of_sines() {
4821        // Sum of two sine waves with different periods creates non-uniform peak spacing
4822        // y = sin(2*pi*t/2) + 0.5*sin(2*pi*t/3)
4823        let m = 300;
4824        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4825
4826        let data: Vec<f64> = argvals
4827            .iter()
4828            .map(|&t| {
4829                (2.0 * std::f64::consts::PI * t / 2.0).sin()
4830                    + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4831            })
4832            .collect();
4833
4834        let result = detect_peaks(&data, 1, m, &argvals, Some(1.0), None, false, None);
4835
4836        // Should find peaks (exact count depends on interference pattern)
4837        assert!(
4838            result.peaks[0].len() >= 4,
4839            "Should find at least 4 peaks, got {}",
4840            result.peaks[0].len()
4841        );
4842
4843        // Inter-peak distances should vary
4844        let distances = &result.inter_peak_distances[0];
4845        if distances.len() >= 2 {
4846            let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4847            let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4848            assert!(
4849                max_dist > min_dist * 1.1,
4850                "Distances should vary: min={:.3}, max={:.3}",
4851                min_dist,
4852                max_dist
4853            );
4854        }
4855    }
4856
4857    #[test]
4858    fn test_seasonal_strength() {
4859        let m = 200;
4860        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4861        let period = 2.0;
4862        let data = generate_sine(1, m, period, &argvals);
4863
4864        let strength = seasonal_strength_variance(&data, 1, m, &argvals, period, 3);
4865        // Pure sine should have high seasonal strength
4866        assert!(strength > 0.8);
4867
4868        let strength_spectral = seasonal_strength_spectral(&data, 1, m, &argvals, period);
4869        assert!(strength_spectral > 0.5);
4870    }
4871
4872    #[test]
4873    fn test_instantaneous_period() {
4874        let m = 200;
4875        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4876        let period = 2.0;
4877        let data = generate_sine(1, m, period, &argvals);
4878
4879        let result = instantaneous_period(&data, 1, m, &argvals);
4880
4881        // Check that instantaneous period is close to true period (away from boundaries)
4882        let mid_period = result.period[m / 2];
4883        assert!(
4884            (mid_period - period).abs() < 0.5,
4885            "Expected period ~{}, got {}",
4886            period,
4887            mid_period
4888        );
4889    }
4890
4891    #[test]
4892    fn test_peak_timing_analysis() {
4893        // Generate 5 cycles of sine with period 2
4894        let m = 500;
4895        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4896        let period = 2.0;
4897        let data = generate_sine(1, m, period, &argvals);
4898
4899        let result = analyze_peak_timing(&data, 1, m, &argvals, period, Some(11));
4900
4901        // Should find approximately 5 peaks
4902        assert!(!result.peak_times.is_empty());
4903        // Normalized timing should be around 0.25 (peak of sin at π/2)
4904        assert!(result.mean_timing.is_finite());
4905        // Pure sine should have low timing variability
4906        assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4907    }
4908
4909    #[test]
4910    fn test_seasonality_classification() {
4911        let m = 400;
4912        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4913        let period = 2.0;
4914        let data = generate_sine(1, m, period, &argvals);
4915
4916        let result = classify_seasonality(&data, 1, m, &argvals, period, None, None);
4917
4918        assert!(result.is_seasonal);
4919        assert!(result.seasonal_strength > 0.5);
4920        assert!(matches!(
4921            result.classification,
4922            SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4923        ));
4924    }
4925
4926    #[test]
4927    fn test_otsu_threshold() {
4928        // Bimodal distribution: mix of low (0.1-0.2) and high (0.7-0.9) values
4929        let values = vec![
4930            0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4931        ];
4932
4933        let threshold = otsu_threshold(&values);
4934
4935        // Threshold should be between the two modes
4936        // Due to small sample size, Otsu's method may not find optimal threshold
4937        // Just verify it returns a reasonable value in the data range
4938        assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4939        assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4940    }
4941
4942    #[test]
4943    fn test_gcv_fourier_nbasis_selection() {
4944        let m = 100;
4945        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4946
4947        // Noisy sine wave
4948        let mut data = vec![0.0; m];
4949        for j in 0..m {
4950            data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4951        }
4952
4953        let nbasis = crate::basis::select_fourier_nbasis_gcv(&data, 1, m, &argvals, 5, 25);
4954
4955        // nbasis should be reasonable (between min and max)
4956        assert!(nbasis >= 5);
4957        assert!(nbasis <= 25);
4958    }
4959
4960    #[test]
4961    fn test_detect_multiple_periods() {
4962        let m = 400;
4963        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); // 0 to 20
4964
4965        // Signal with two periods: 2 and 7
4966        let period1 = 2.0;
4967        let period2 = 7.0;
4968        let mut data = vec![0.0; m];
4969        for j in 0..m {
4970            data[j] = (2.0 * PI * argvals[j] / period1).sin()
4971                + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
4972        }
4973
4974        // Use higher min_strength threshold to properly stop after real periods
4975        let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);
4976
4977        // Should detect exactly 2 periods with these thresholds
4978        assert!(
4979            detected.len() >= 2,
4980            "Expected at least 2 periods, found {}",
4981            detected.len()
4982        );
4983
4984        // Check that both periods were detected (order depends on amplitude)
4985        let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
4986        let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
4987        let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);
4988
4989        assert!(
4990            has_period1,
4991            "Expected to find period ~{}, got {:?}",
4992            period1, periods
4993        );
4994        assert!(
4995            has_period2,
4996            "Expected to find period ~{}, got {:?}",
4997            period2, periods
4998        );
4999
5000        // Verify first detected has higher amplitude (amplitude 1.0 vs 0.6)
5001        assert!(
5002            detected[0].amplitude > detected[1].amplitude,
5003            "First detected should have higher amplitude"
5004        );
5005
5006        // Each detected period should have strength and confidence info
5007        for d in &detected {
5008            assert!(
5009                d.strength > 0.0,
5010                "Detected period should have positive strength"
5011            );
5012            assert!(
5013                d.confidence > 0.0,
5014                "Detected period should have positive confidence"
5015            );
5016            assert!(
5017                d.amplitude > 0.0,
5018                "Detected period should have positive amplitude"
5019            );
5020        }
5021    }
5022
5023    // ========================================================================
5024    // Amplitude Modulation Detection Tests
5025    // ========================================================================
5026
5027    #[test]
5028    fn test_amplitude_modulation_stable() {
5029        // Constant amplitude seasonal signal - should detect as Stable
5030        let m = 200;
5031        let period = 0.2;
5032        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5033
5034        // Constant amplitude sine wave
5035        let data: Vec<f64> = argvals
5036            .iter()
5037            .map(|&t| (2.0 * PI * t / period).sin())
5038            .collect();
5039
5040        let result = detect_amplitude_modulation(
5041            &data, 1, m, &argvals, period, 0.15, // modulation threshold
5042            0.3,  // seasonality threshold
5043        );
5044
5045        eprintln!(
5046            "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5047            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5048        );
5049
5050        assert!(result.is_seasonal, "Should detect seasonality");
5051        assert!(
5052            !result.has_modulation,
5053            "Constant amplitude should not have modulation, got score={:.4}",
5054            result.modulation_score
5055        );
5056        assert_eq!(
5057            result.modulation_type,
5058            ModulationType::Stable,
5059            "Should be classified as Stable"
5060        );
5061    }
5062
5063    #[test]
5064    fn test_amplitude_modulation_emerging() {
5065        // Amplitude increases over time (emerging seasonality)
5066        let m = 200;
5067        let period = 0.2;
5068        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5069
5070        // Amplitude grows from 0.2 to 1.0
5071        let data: Vec<f64> = argvals
5072            .iter()
5073            .map(|&t| {
5074                let amplitude = 0.2 + 0.8 * t; // Linear increase
5075                amplitude * (2.0 * PI * t / period).sin()
5076            })
5077            .collect();
5078
5079        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5080
5081        eprintln!(
5082            "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5083            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5084        );
5085
5086        assert!(result.is_seasonal, "Should detect seasonality");
5087        assert!(
5088            result.has_modulation,
5089            "Growing amplitude should have modulation, score={:.4}",
5090            result.modulation_score
5091        );
5092        assert_eq!(
5093            result.modulation_type,
5094            ModulationType::Emerging,
5095            "Should be classified as Emerging, trend={:.4}",
5096            result.amplitude_trend
5097        );
5098        assert!(
5099            result.amplitude_trend > 0.0,
5100            "Trend should be positive for emerging"
5101        );
5102    }
5103
5104    #[test]
5105    fn test_amplitude_modulation_fading() {
5106        // Amplitude decreases over time (fading seasonality)
5107        let m = 200;
5108        let period = 0.2;
5109        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5110
5111        // Amplitude decreases from 1.0 to 0.2
5112        let data: Vec<f64> = argvals
5113            .iter()
5114            .map(|&t| {
5115                let amplitude = 1.0 - 0.8 * t; // Linear decrease
5116                amplitude * (2.0 * PI * t / period).sin()
5117            })
5118            .collect();
5119
5120        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5121
5122        eprintln!(
5123            "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5124            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5125        );
5126
5127        assert!(result.is_seasonal, "Should detect seasonality");
5128        assert!(
5129            result.has_modulation,
5130            "Fading amplitude should have modulation"
5131        );
5132        assert_eq!(
5133            result.modulation_type,
5134            ModulationType::Fading,
5135            "Should be classified as Fading, trend={:.4}",
5136            result.amplitude_trend
5137        );
5138        assert!(
5139            result.amplitude_trend < 0.0,
5140            "Trend should be negative for fading"
5141        );
5142    }
5143
5144    #[test]
5145    fn test_amplitude_modulation_oscillating() {
5146        // Amplitude oscillates (neither purely emerging nor fading)
5147        let m = 200;
5148        let period = 0.1;
5149        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5150
5151        // Amplitude oscillates: high-low-high-low pattern
5152        let data: Vec<f64> = argvals
5153            .iter()
5154            .map(|&t| {
5155                let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); // 2 modulation cycles
5156                amplitude * (2.0 * PI * t / period).sin()
5157            })
5158            .collect();
5159
5160        let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5161
5162        eprintln!(
5163            "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5164            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5165        );
5166
5167        assert!(result.is_seasonal, "Should detect seasonality");
5168        // Oscillating has high variation but near-zero trend
5169        if result.has_modulation {
5170            // Trend should be near zero for oscillating
5171            assert!(
5172                result.amplitude_trend.abs() < 0.5,
5173                "Trend should be small for oscillating"
5174            );
5175        }
5176    }
5177
5178    #[test]
5179    fn test_amplitude_modulation_non_seasonal() {
5180        // Pure noise - no seasonality
5181        let m = 100;
5182        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5183
5184        // Random noise (use simple pseudo-random)
5185        let data: Vec<f64> = (0..m)
5186            .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5187            .collect();
5188
5189        let result = detect_amplitude_modulation(
5190            &data, 1, m, &argvals, 0.2, // arbitrary period
5191            0.15, 0.3,
5192        );
5193
5194        assert!(
5195            !result.is_seasonal,
5196            "Noise should not be detected as seasonal"
5197        );
5198        assert_eq!(
5199            result.modulation_type,
5200            ModulationType::NonSeasonal,
5201            "Should be classified as NonSeasonal"
5202        );
5203    }
5204
5205    // ========================================================================
5206    // Wavelet-based Amplitude Modulation Detection Tests
5207    // ========================================================================
5208
5209    #[test]
5210    fn test_wavelet_amplitude_modulation_stable() {
5211        let m = 200;
5212        let period = 0.2;
5213        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5214
5215        let data: Vec<f64> = argvals
5216            .iter()
5217            .map(|&t| (2.0 * PI * t / period).sin())
5218            .collect();
5219
5220        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.3);
5221
5222        eprintln!(
5223            "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5224            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5225        );
5226
5227        assert!(result.is_seasonal, "Should detect seasonality");
5228        assert!(
5229            !result.has_modulation,
5230            "Constant amplitude should not have modulation, got score={:.4}",
5231            result.modulation_score
5232        );
5233    }
5234
5235    #[test]
5236    fn test_wavelet_amplitude_modulation_emerging() {
5237        let m = 200;
5238        let period = 0.2;
5239        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5240
5241        // Amplitude grows from 0.2 to 1.0
5242        let data: Vec<f64> = argvals
5243            .iter()
5244            .map(|&t| {
5245                let amplitude = 0.2 + 0.8 * t;
5246                amplitude * (2.0 * PI * t / period).sin()
5247            })
5248            .collect();
5249
5250        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5251
5252        eprintln!(
5253            "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5254            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5255        );
5256
5257        assert!(result.is_seasonal, "Should detect seasonality");
5258        assert!(
5259            result.has_modulation,
5260            "Growing amplitude should have modulation"
5261        );
5262        assert!(
5263            result.amplitude_trend > 0.0,
5264            "Trend should be positive for emerging"
5265        );
5266    }
5267
5268    #[test]
5269    fn test_wavelet_amplitude_modulation_fading() {
5270        let m = 200;
5271        let period = 0.2;
5272        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5273
5274        // Amplitude decreases from 1.0 to 0.2
5275        let data: Vec<f64> = argvals
5276            .iter()
5277            .map(|&t| {
5278                let amplitude = 1.0 - 0.8 * t;
5279                amplitude * (2.0 * PI * t / period).sin()
5280            })
5281            .collect();
5282
5283        let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5284
5285        eprintln!(
5286            "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5287            result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5288        );
5289
5290        assert!(result.is_seasonal, "Should detect seasonality");
5291        assert!(
5292            result.has_modulation,
5293            "Fading amplitude should have modulation"
5294        );
5295        assert!(
5296            result.amplitude_trend < 0.0,
5297            "Trend should be negative for fading"
5298        );
5299    }
5300
5301    #[test]
5302    fn test_seasonal_strength_wavelet() {
5303        let m = 200;
5304        let period = 0.2;
5305        let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5306
5307        // Pure sine wave at target period - should have high strength
5308        let seasonal_data: Vec<f64> = argvals
5309            .iter()
5310            .map(|&t| (2.0 * PI * t / period).sin())
5311            .collect();
5312
5313        let strength = seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period);
5314        eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5315        assert!(
5316            strength > 0.5,
5317            "Pure sine should have high wavelet strength"
5318        );
5319
5320        // Pure noise - should have low strength
5321        let noise_data: Vec<f64> = (0..m)
5322            .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5323            .collect();
5324
5325        let noise_strength = seasonal_strength_wavelet(&noise_data, 1, m, &argvals, period);
5326        eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5327        assert!(
5328            noise_strength < 0.3,
5329            "Noise should have low wavelet strength"
5330        );
5331
5332        // Wrong period - should have lower strength
5333        let wrong_period_strength =
5334            seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period * 2.0);
5335        eprintln!(
5336            "Wavelet strength (wrong period): {:.4}",
5337            wrong_period_strength
5338        );
5339        assert!(
5340            wrong_period_strength < strength,
5341            "Wrong period should have lower strength"
5342        );
5343    }
5344
5345    #[test]
5346    fn test_compute_mean_curve() {
5347        // 2 samples, 3 time points
5348        // Sample 1: [1, 2, 3]
5349        // Sample 2: [2, 4, 6]
5350        // Mean: [1.5, 3, 4.5]
5351        let data = vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0]; // column-major
5352        let mean = compute_mean_curve(&data, 2, 3);
5353        assert_eq!(mean.len(), 3);
5354        assert!((mean[0] - 1.5).abs() < 1e-10);
5355        assert!((mean[1] - 3.0).abs() < 1e-10);
5356        assert!((mean[2] - 4.5).abs() < 1e-10);
5357    }
5358
5359    #[test]
5360    fn test_compute_mean_curve_parallel_consistency() {
5361        // Test that parallel and sequential give same results
5362        let n = 10;
5363        let m = 200;
5364        let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5365
5366        let seq_result = compute_mean_curve_impl(&data, n, m, false);
5367        let par_result = compute_mean_curve_impl(&data, n, m, true);
5368
5369        assert_eq!(seq_result.len(), par_result.len());
5370        for (s, p) in seq_result.iter().zip(par_result.iter()) {
5371            assert!(
5372                (s - p).abs() < 1e-10,
5373                "Sequential and parallel results differ"
5374            );
5375        }
5376    }
5377
5378    #[test]
5379    fn test_interior_bounds() {
5380        // m = 100: edge_skip = 10, interior = [10, 90)
5381        let bounds = interior_bounds(100);
5382        assert!(bounds.is_some());
5383        let (start, end) = bounds.unwrap();
5384        assert_eq!(start, 10);
5385        assert_eq!(end, 90);
5386
5387        // m = 10: edge_skip = 1, but min(1, 2) = 1, max(9, 7) = 9
5388        let bounds = interior_bounds(10);
5389        assert!(bounds.is_some());
5390        let (start, end) = bounds.unwrap();
5391        assert!(start < end);
5392
5393        // Very small m might not have valid interior
5394        let bounds = interior_bounds(2);
5395        // Should still return something as long as end > start
5396        assert!(bounds.is_some() || bounds.is_none());
5397    }
5398
5399    #[test]
5400    fn test_hilbert_transform_pure_sine() {
5401        // Hilbert transform of sin(t) should give cos(t) in imaginary part
5402        let m = 200;
5403        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5404        let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();
5405
5406        let analytic = hilbert_transform(&signal);
5407        assert_eq!(analytic.len(), m);
5408
5409        // Check amplitude is approximately 1
5410        for c in analytic.iter().skip(10).take(m - 20) {
5411            let amp = c.norm();
5412            assert!(
5413                (amp - 1.0).abs() < 0.1,
5414                "Amplitude should be ~1, got {}",
5415                amp
5416            );
5417        }
5418    }
5419
5420    #[test]
5421    fn test_sazed_pure_sine() {
5422        // Pure sine wave with known period
5423        let m = 200;
5424        let period = 2.0;
5425        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5426        let data: Vec<f64> = argvals
5427            .iter()
5428            .map(|&t| (2.0 * PI * t / period).sin())
5429            .collect();
5430
5431        let result = sazed(&data, &argvals, None);
5432
5433        assert!(result.period.is_finite(), "SAZED should detect a period");
5434        assert!(
5435            (result.period - period).abs() < 0.3,
5436            "Expected period ~{}, got {}",
5437            period,
5438            result.period
5439        );
5440        assert!(
5441            result.confidence > 0.4,
5442            "Expected confidence > 0.4, got {}",
5443            result.confidence
5444        );
5445        assert!(
5446            result.agreeing_components >= 2,
5447            "Expected at least 2 agreeing components, got {}",
5448            result.agreeing_components
5449        );
5450    }
5451
5452    #[test]
5453    fn test_sazed_noisy_sine() {
5454        // Sine wave with noise
5455        let m = 300;
5456        let period = 3.0;
5457        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5458
5459        // Deterministic pseudo-noise using sin with different frequency
5460        let data: Vec<f64> = argvals
5461            .iter()
5462            .enumerate()
5463            .map(|(i, &t)| {
5464                let signal = (2.0 * PI * t / period).sin();
5465                let noise = 0.1 * (17.3 * i as f64).sin();
5466                signal + noise
5467            })
5468            .collect();
5469
5470        let result = sazed(&data, &argvals, Some(0.15));
5471
5472        assert!(
5473            result.period.is_finite(),
5474            "SAZED should detect a period even with noise"
5475        );
5476        assert!(
5477            (result.period - period).abs() < 0.5,
5478            "Expected period ~{}, got {}",
5479            period,
5480            result.period
5481        );
5482    }
5483
5484    #[test]
5485    fn test_sazed_fdata() {
5486        // Multiple samples with same period
5487        let n = 5;
5488        let m = 200;
5489        let period = 2.0;
5490        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5491        let data = generate_sine(n, m, period, &argvals);
5492
5493        let result = sazed_fdata(&data, n, m, &argvals, None);
5494
5495        assert!(result.period.is_finite(), "SAZED should detect a period");
5496        assert!(
5497            (result.period - period).abs() < 0.3,
5498            "Expected period ~{}, got {}",
5499            period,
5500            result.period
5501        );
5502    }
5503
5504    #[test]
5505    fn test_sazed_short_series() {
5506        // Very short series - should return NaN gracefully
5507        let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5508        let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5509
5510        let result = sazed(&data, &argvals, None);
5511
5512        // Should handle gracefully (return NaN for too-short series)
5513        assert!(
5514            result.period.is_nan() || result.period.is_finite(),
5515            "Should return NaN or valid period"
5516        );
5517    }
5518
5519    #[test]
5520    fn test_autoperiod_pure_sine() {
5521        // Pure sine wave with known period
5522        let m = 200;
5523        let period = 2.0;
5524        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5525        let data: Vec<f64> = argvals
5526            .iter()
5527            .map(|&t| (2.0 * PI * t / period).sin())
5528            .collect();
5529
5530        let result = autoperiod(&data, &argvals, None, None);
5531
5532        assert!(
5533            result.period.is_finite(),
5534            "Autoperiod should detect a period"
5535        );
5536        assert!(
5537            (result.period - period).abs() < 0.3,
5538            "Expected period ~{}, got {}",
5539            period,
5540            result.period
5541        );
5542        assert!(
5543            result.confidence > 0.0,
5544            "Expected positive confidence, got {}",
5545            result.confidence
5546        );
5547    }
5548
5549    #[test]
5550    fn test_autoperiod_with_trend() {
5551        // Sine wave with linear trend
5552        let m = 300;
5553        let period = 3.0;
5554        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5555        let data: Vec<f64> = argvals
5556            .iter()
5557            .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5558            .collect();
5559
5560        let result = autoperiod(&data, &argvals, None, None);
5561
5562        assert!(
5563            result.period.is_finite(),
5564            "Autoperiod should detect a period"
5565        );
5566        // Allow more tolerance with trend
5567        assert!(
5568            (result.period - period).abs() < 0.5,
5569            "Expected period ~{}, got {}",
5570            period,
5571            result.period
5572        );
5573    }
5574
5575    #[test]
5576    fn test_autoperiod_candidates() {
5577        // Verify candidates are generated
5578        let m = 200;
5579        let period = 2.0;
5580        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5581        let data: Vec<f64> = argvals
5582            .iter()
5583            .map(|&t| (2.0 * PI * t / period).sin())
5584            .collect();
5585
5586        let result = autoperiod(&data, &argvals, Some(5), Some(10));
5587
5588        assert!(
5589            !result.candidates.is_empty(),
5590            "Should have at least one candidate"
5591        );
5592
5593        // Best candidate should have highest combined score
5594        let max_score = result
5595            .candidates
5596            .iter()
5597            .map(|c| c.combined_score)
5598            .fold(f64::NEG_INFINITY, f64::max);
5599        assert!(
5600            (result.confidence - max_score).abs() < 1e-10,
5601            "Returned confidence should match best candidate's score"
5602        );
5603    }
5604
5605    #[test]
5606    fn test_autoperiod_fdata() {
5607        // Multiple samples with same period
5608        let n = 5;
5609        let m = 200;
5610        let period = 2.0;
5611        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5612        let data = generate_sine(n, m, period, &argvals);
5613
5614        let result = autoperiod_fdata(&data, n, m, &argvals, None, None);
5615
5616        assert!(
5617            result.period.is_finite(),
5618            "Autoperiod should detect a period"
5619        );
5620        assert!(
5621            (result.period - period).abs() < 0.3,
5622            "Expected period ~{}, got {}",
5623            period,
5624            result.period
5625        );
5626    }
5627
5628    #[test]
5629    fn test_cfd_autoperiod_pure_sine() {
5630        // Pure sine wave with known period
5631        let m = 200;
5632        let period = 2.0;
5633        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5634        let data: Vec<f64> = argvals
5635            .iter()
5636            .map(|&t| (2.0 * PI * t / period).sin())
5637            .collect();
5638
5639        let result = cfd_autoperiod(&data, &argvals, None, None);
5640
5641        assert!(
5642            result.period.is_finite(),
5643            "CFDAutoperiod should detect a period"
5644        );
5645        assert!(
5646            (result.period - period).abs() < 0.3,
5647            "Expected period ~{}, got {}",
5648            period,
5649            result.period
5650        );
5651    }
5652
5653    #[test]
5654    fn test_cfd_autoperiod_with_trend() {
5655        // Sine wave with strong linear trend - CFD excels here
5656        let m = 300;
5657        let period = 3.0;
5658        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5659        let data: Vec<f64> = argvals
5660            .iter()
5661            .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5662            .collect();
5663
5664        let result = cfd_autoperiod(&data, &argvals, None, None);
5665
5666        assert!(
5667            result.period.is_finite(),
5668            "CFDAutoperiod should detect a period despite trend"
5669        );
5670        // Allow more tolerance since trend can affect detection
5671        assert!(
5672            (result.period - period).abs() < 0.6,
5673            "Expected period ~{}, got {}",
5674            period,
5675            result.period
5676        );
5677    }
5678
5679    #[test]
5680    fn test_cfd_autoperiod_multiple_periods() {
5681        // Signal with two periods - should detect multiple
5682        let m = 400;
5683        let period1 = 2.0;
5684        let period2 = 5.0;
5685        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5686        let data: Vec<f64> = argvals
5687            .iter()
5688            .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5689            .collect();
5690
5691        let result = cfd_autoperiod(&data, &argvals, None, None);
5692
5693        assert!(
5694            !result.periods.is_empty(),
5695            "Should detect at least one period"
5696        );
5697        // The primary period should be one of the two
5698        let close_to_p1 = (result.period - period1).abs() < 0.5;
5699        let close_to_p2 = (result.period - period2).abs() < 1.0;
5700        assert!(
5701            close_to_p1 || close_to_p2,
5702            "Primary period {} not close to {} or {}",
5703            result.period,
5704            period1,
5705            period2
5706        );
5707    }
5708
5709    #[test]
5710    fn test_cfd_autoperiod_fdata() {
5711        // Multiple samples with same period
5712        let n = 5;
5713        let m = 200;
5714        let period = 2.0;
5715        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5716        let data = generate_sine(n, m, period, &argvals);
5717
5718        let result = cfd_autoperiod_fdata(&data, n, m, &argvals, None, None);
5719
5720        assert!(
5721            result.period.is_finite(),
5722            "CFDAutoperiod should detect a period"
5723        );
5724        assert!(
5725            (result.period - period).abs() < 0.3,
5726            "Expected period ~{}, got {}",
5727            period,
5728            result.period
5729        );
5730    }
5731
5732    // ========================================================================
5733    // SSA Tests
5734    // ========================================================================
5735
5736    #[test]
5737    fn test_ssa_pure_sine() {
5738        let n = 200;
5739        let period = 12.0;
5740        let values: Vec<f64> = (0..n)
5741            .map(|i| {
5742                let t = i as f64;
5743                0.01 * t + (2.0 * PI * t / period).sin() + 0.05 * ((i * 7) as f64 * 0.1).sin()
5744            })
5745            .collect();
5746
5747        let result = ssa(&values, None, None, None, None);
5748
5749        // trend + seasonal + noise ≈ original
5750        for i in 0..n {
5751            let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5752            assert!(
5753                (reconstructed - values[i]).abs() < 1e-8,
5754                "SSA reconstruction error at {}: {} vs {}",
5755                i,
5756                reconstructed,
5757                values[i]
5758            );
5759        }
5760
5761        // Singular values should be descending
5762        for i in 1..result.singular_values.len() {
5763            assert!(
5764                result.singular_values[i] <= result.singular_values[i - 1] + 1e-10,
5765                "Singular values should be descending: {} > {}",
5766                result.singular_values[i],
5767                result.singular_values[i - 1]
5768            );
5769        }
5770
5771        // Contributions should sum to <= 1
5772        let total_contrib: f64 = result.contributions.iter().sum();
5773        assert!(
5774            total_contrib <= 1.0 + 1e-10,
5775            "Contributions should sum to <= 1, got {}",
5776            total_contrib
5777        );
5778    }
5779
5780    #[test]
5781    fn test_ssa_explicit_groupings() {
5782        let n = 100;
5783        let period = 10.0;
5784        let values: Vec<f64> = (0..n)
5785            .map(|i| 0.01 * i as f64 + (2.0 * PI * i as f64 / period).sin())
5786            .collect();
5787
5788        let trend_components = [0usize];
5789        let seasonal_components = [1usize, 2];
5790
5791        let result = ssa(
5792            &values,
5793            None,
5794            None,
5795            Some(&trend_components),
5796            Some(&seasonal_components),
5797        );
5798
5799        assert_eq!(result.trend.len(), n);
5800        assert_eq!(result.seasonal.len(), n);
5801        assert_eq!(result.noise.len(), n);
5802
5803        // Reconstruction should still hold
5804        for i in 0..n {
5805            let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5806            assert!(
5807                (reconstructed - values[i]).abs() < 1e-8,
5808                "SSA explicit grouping reconstruction error at {}",
5809                i
5810            );
5811        }
5812    }
5813
5814    #[test]
5815    fn test_ssa_short_series() {
5816        // n < 4 should trigger early return
5817        let values = vec![1.0, 2.0, 3.0];
5818        let result = ssa(&values, None, None, None, None);
5819
5820        assert_eq!(result.trend, values);
5821        assert_eq!(result.seasonal, vec![0.0; 3]);
5822        assert_eq!(result.noise, vec![0.0; 3]);
5823        assert_eq!(result.n_components, 0);
5824    }
5825
5826    #[test]
5827    fn test_ssa_fdata() {
5828        let n = 5;
5829        let m = 100;
5830        let mut data = vec![0.0; n * m];
5831        for i in 0..n {
5832            let amp = (i + 1) as f64;
5833            for j in 0..m {
5834                data[i + j * n] = amp * (2.0 * PI * j as f64 / 12.0).sin() + 0.01 * j as f64;
5835            }
5836        }
5837
5838        let result = ssa_fdata(&data, n, m, None, None);
5839
5840        assert_eq!(result.trend.len(), m);
5841        assert_eq!(result.seasonal.len(), m);
5842        assert_eq!(result.noise.len(), m);
5843        assert!(!result.singular_values.is_empty());
5844    }
5845
5846    #[test]
5847    fn test_ssa_seasonality() {
5848        // Seasonal signal
5849        let n = 200;
5850        let period = 12.0;
5851        let seasonal_values: Vec<f64> = (0..n)
5852            .map(|i| (2.0 * PI * i as f64 / period).sin())
5853            .collect();
5854
5855        let (is_seasonal, _det_period, confidence) =
5856            ssa_seasonality(&seasonal_values, None, Some(0.05));
5857
5858        // A pure sine should be detected as seasonal
5859        // (confidence depends on component grouping)
5860        assert!(confidence >= 0.0, "Confidence should be non-negative");
5861
5862        // Noise-only signal should not be seasonal
5863        let noise_values: Vec<f64> = (0..n)
5864            .map(|i| ((i * 13 + 7) as f64 * 0.1).sin() * 0.01)
5865            .collect();
5866
5867        let (is_noise_seasonal, _, noise_conf) = ssa_seasonality(&noise_values, None, Some(0.5));
5868
5869        // Noise should have low confidence (but it's not guaranteed to be strictly false
5870        // depending on auto-grouping, so we just check confidence)
5871        let _ = (is_seasonal, is_noise_seasonal, noise_conf);
5872    }
5873
5874    // ========================================================================
5875    // Matrix Profile Tests
5876    // ========================================================================
5877
5878    #[test]
5879    fn test_matrix_profile_periodic() {
5880        let period = 20;
5881        let n = period * 10;
5882        let values: Vec<f64> = (0..n)
5883            .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5884            .collect();
5885
5886        let result = matrix_profile(&values, Some(15), None);
5887
5888        assert_eq!(result.profile.len(), n - 15 + 1);
5889        assert_eq!(result.profile_index.len(), n - 15 + 1);
5890        assert_eq!(result.subsequence_length, 15);
5891
5892        // Profile should be finite
5893        for &p in &result.profile {
5894            assert!(p.is_finite(), "Profile values should be finite");
5895        }
5896
5897        // Primary period should be close to 20
5898        assert!(
5899            (result.primary_period - period as f64).abs() < 5.0,
5900            "Expected primary_period ≈ {}, got {}",
5901            period,
5902            result.primary_period
5903        );
5904    }
5905
5906    #[test]
5907    fn test_matrix_profile_non_periodic() {
5908        // Linear ramp (non-periodic)
5909        let n = 200;
5910        let values: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5911
5912        let result = matrix_profile(&values, Some(10), None);
5913
5914        assert_eq!(result.profile.len(), n - 10 + 1);
5915
5916        // Should have lower confidence than periodic
5917        // (not always strictly 0, depends on ramp structure)
5918        assert!(result.confidence <= 1.0, "Confidence should be <= 1.0");
5919    }
5920
5921    #[test]
5922    fn test_matrix_profile_fdata() {
5923        let n = 3;
5924        let m = 200;
5925        let period = 20.0;
5926        let mut data = vec![0.0; n * m];
5927        for i in 0..n {
5928            let amp = (i + 1) as f64;
5929            for j in 0..m {
5930                data[i + j * n] = amp * (2.0 * PI * j as f64 / period).sin();
5931            }
5932        }
5933
5934        let result = matrix_profile_fdata(&data, n, m, Some(15), None);
5935
5936        assert!(!result.profile.is_empty());
5937        assert!(result.profile_index.len() == result.profile.len());
5938    }
5939
5940    #[test]
5941    fn test_matrix_profile_seasonality() {
5942        let period = 20;
5943        let n = period * 10;
5944        // Periodic signal
5945        let values: Vec<f64> = (0..n)
5946            .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5947            .collect();
5948
5949        let (is_seasonal, det_period, confidence) =
5950            matrix_profile_seasonality(&values, Some(15), Some(0.05));
5951
5952        assert!(
5953            is_seasonal,
5954            "Periodic signal should be detected as seasonal"
5955        );
5956        assert!(det_period > 0.0, "Detected period should be positive");
5957        assert!(confidence >= 0.05, "Confidence should be above threshold");
5958
5959        // Weak / non-periodic signal
5960        let weak_values: Vec<f64> = (0..n).map(|i| i as f64 * 0.001).collect();
5961        let (is_weak_seasonal, _, _) =
5962            matrix_profile_seasonality(&weak_values, Some(15), Some(0.5));
5963        let _ = is_weak_seasonal; // May or may not detect - we just check it doesn't panic
5964    }
5965
5966    // ========================================================================
5967    // Lomb-Scargle Tests
5968    // ========================================================================
5969
5970    #[test]
5971    fn test_lomb_scargle_regular() {
5972        let n = 200;
5973        let true_period = 5.0;
5974        let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5975        let values: Vec<f64> = times
5976            .iter()
5977            .map(|&t| (2.0 * PI * t / true_period).sin())
5978            .collect();
5979
5980        let result = lomb_scargle(&times, &values, None, None, None);
5981
5982        assert!(
5983            (result.peak_period - true_period).abs() < 0.5,
5984            "Expected peak_period ≈ {}, got {}",
5985            true_period,
5986            result.peak_period
5987        );
5988        assert!(
5989            result.false_alarm_probability < 0.05,
5990            "FAP should be low for strong signal: {}",
5991            result.false_alarm_probability
5992        );
5993        assert!(result.peak_power > 0.0, "Peak power should be positive");
5994        assert!(!result.frequencies.is_empty());
5995        assert_eq!(result.frequencies.len(), result.power.len());
5996        assert_eq!(result.frequencies.len(), result.periods.len());
5997    }
5998
5999    #[test]
6000    fn test_lomb_scargle_irregular() {
6001        let true_period = 5.0;
6002        // Irregularly sampled: take a subset of regular samples
6003        let all_times: Vec<f64> = (0..300).map(|i| i as f64 * 0.1).collect();
6004        // Take every other point with some jitter-like selection
6005        let times: Vec<f64> = all_times
6006            .iter()
6007            .enumerate()
6008            .filter(|(i, _)| i % 2 == 0 || i % 3 == 0)
6009            .map(|(_, &t)| t)
6010            .collect();
6011        let values: Vec<f64> = times
6012            .iter()
6013            .map(|&t| (2.0 * PI * t / true_period).sin())
6014            .collect();
6015
6016        let result = lomb_scargle(&times, &values, None, None, None);
6017
6018        assert!(
6019            (result.peak_period - true_period).abs() < 1.0,
6020            "Irregular LS: expected period ≈ {}, got {}",
6021            true_period,
6022            result.peak_period
6023        );
6024    }
6025
6026    #[test]
6027    fn test_lomb_scargle_custom_frequencies() {
6028        let n = 100;
6029        let true_period = 5.0;
6030        let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
6031        let values: Vec<f64> = times
6032            .iter()
6033            .map(|&t| (2.0 * PI * t / true_period).sin())
6034            .collect();
6035
6036        // Explicit frequency grid
6037        let frequencies: Vec<f64> = (1..50).map(|i| i as f64 * 0.01).collect();
6038        let result = lomb_scargle(&times, &values, Some(&frequencies), None, None);
6039
6040        assert_eq!(result.frequencies.len(), frequencies.len());
6041        assert_eq!(result.power.len(), frequencies.len());
6042        assert!(result.peak_power > 0.0);
6043    }
6044
6045    #[test]
6046    fn test_lomb_scargle_fdata() {
6047        let n = 5;
6048        let m = 200;
6049        let period = 5.0;
6050        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6051        let data = generate_sine(n, m, period, &argvals);
6052
6053        let result = lomb_scargle_fdata(&data, n, m, &argvals, None, None);
6054
6055        assert!(
6056            (result.peak_period - period).abs() < 0.5,
6057            "Fdata LS: expected period ≈ {}, got {}",
6058            period,
6059            result.peak_period
6060        );
6061        assert!(!result.frequencies.is_empty());
6062    }
6063
6064    // ========================================================================
6065    // Seasonality change detection tests
6066    // ========================================================================
6067
6068    #[test]
6069    fn test_detect_seasonality_changes_onset() {
6070        let m = 400;
6071        let period = 2.0;
6072        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); // 0..20
6073
6074        // First half: noise-like (low seasonality), second half: strong sine
6075        let data: Vec<f64> = argvals
6076            .iter()
6077            .map(|&t| {
6078                if t < 10.0 {
6079                    // Weak signal
6080                    0.05 * ((t * 13.0).sin() + (t * 7.0).cos())
6081                } else {
6082                    // Strong seasonal
6083                    (2.0 * PI * t / period).sin()
6084                }
6085            })
6086            .collect();
6087
6088        let result = detect_seasonality_changes(&data, 1, m, &argvals, period, 0.3, 4.0, 2.0);
6089
6090        assert!(
6091            !result.strength_curve.is_empty(),
6092            "Strength curve should not be empty"
6093        );
6094        assert_eq!(result.strength_curve.len(), m);
6095
6096        // Should detect at least one change point (onset around t=10)
6097        if !result.change_points.is_empty() {
6098            let onset_points: Vec<_> = result
6099                .change_points
6100                .iter()
6101                .filter(|cp| cp.change_type == ChangeType::Onset)
6102                .collect();
6103            // At least one onset should exist near the transition
6104            assert!(!onset_points.is_empty(), "Should detect Onset change point");
6105        }
6106    }
6107
6108    #[test]
6109    fn test_detect_seasonality_changes_no_change() {
6110        let m = 400;
6111        let period = 2.0;
6112        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
6113
6114        // Consistently strong seasonal signal
6115        let data: Vec<f64> = argvals
6116            .iter()
6117            .map(|&t| (2.0 * PI * t / period).sin())
6118            .collect();
6119
6120        let result = detect_seasonality_changes(&data, 1, m, &argvals, period, 0.3, 4.0, 2.0);
6121
6122        assert!(!result.strength_curve.is_empty());
6123        // With consistently seasonal data, there should be no Cessation points
6124        let cessation_points: Vec<_> = result
6125            .change_points
6126            .iter()
6127            .filter(|cp| cp.change_type == ChangeType::Cessation)
6128            .collect();
6129        assert!(
6130            cessation_points.is_empty(),
6131            "Consistently seasonal signal should have no Cessation points, found {}",
6132            cessation_points.len()
6133        );
6134    }
6135
6136    // ========================================================================
6137    // Period estimation tests (ACF and regression)
6138    // ========================================================================
6139
6140    #[test]
6141    fn test_estimate_period_acf() {
6142        let m = 200;
6143        let period = 2.0;
6144        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6145        let data = generate_sine(1, m, period, &argvals);
6146
6147        let estimate = estimate_period_acf(&data, 1, m, &argvals, m / 2);
6148
6149        assert!(
6150            estimate.period.is_finite(),
6151            "ACF period estimate should be finite"
6152        );
6153        assert!(
6154            (estimate.period - period).abs() < 0.5,
6155            "ACF expected period ≈ {}, got {}",
6156            period,
6157            estimate.period
6158        );
6159        assert!(
6160            estimate.confidence > 0.0,
6161            "ACF confidence should be positive"
6162        );
6163    }
6164
6165    #[test]
6166    fn test_estimate_period_regression() {
6167        let m = 200;
6168        let period = 2.0;
6169        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6170        let data = generate_sine(1, m, period, &argvals);
6171
6172        let estimate = estimate_period_regression(&data, 1, m, &argvals, 1.5, 3.0, 100, 3);
6173
6174        assert!(
6175            estimate.period.is_finite(),
6176            "Regression period estimate should be finite"
6177        );
6178        assert!(
6179            (estimate.period - period).abs() < 0.5,
6180            "Regression expected period ≈ {}, got {}",
6181            period,
6182            estimate.period
6183        );
6184        assert!(
6185            estimate.confidence > 0.0,
6186            "Regression confidence should be positive"
6187        );
6188    }
6189
6190    // ========================================================================
6191    // Seasonal strength windowed test
6192    // ========================================================================
6193
6194    #[test]
6195    fn test_seasonal_strength_windowed_variance() {
6196        let m = 200;
6197        let period = 2.0;
6198        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6199        let data = generate_sine(1, m, period, &argvals);
6200
6201        let strengths = seasonal_strength_windowed(
6202            &data,
6203            1,
6204            m,
6205            &argvals,
6206            period,
6207            4.0, // window_size
6208            StrengthMethod::Variance,
6209        );
6210
6211        assert_eq!(strengths.len(), m, "Should return m values");
6212
6213        // Interior values (away from edges) should be in [0,1]
6214        let interior_start = m / 4;
6215        let interior_end = 3 * m / 4;
6216        for i in interior_start..interior_end {
6217            let s = strengths[i];
6218            if s.is_finite() {
6219                assert!(
6220                    (-0.01..=1.01).contains(&s),
6221                    "Windowed strength at {} should be near [0,1], got {}",
6222                    i,
6223                    s
6224                );
6225            }
6226        }
6227    }
6228}