mod autoperiod;
mod change;
mod hilbert;
mod lomb_scargle;
mod matrix_profile;
mod peak;
mod period;
mod sazed;
mod ssa;
mod strength;
#[cfg(test)]
mod tests;
use crate::iter_maybe_parallel;
use crate::matrix::FdMatrix;
use num_complex::Complex;
#[cfg(feature = "parallel")]
use rayon::iter::ParallelIterator;
use rustfft::FftPlanner;
use std::f64::consts::PI;
pub use autoperiod::{
autoperiod, autoperiod_fdata, cfd_autoperiod, cfd_autoperiod_fdata, AutoperiodCandidate,
AutoperiodResult, CfdAutoperiodResult,
};
pub use change::{
analyze_peak_timing, classify_seasonality, detect_amplitude_modulation,
detect_amplitude_modulation_wavelet, detect_seasonality_changes,
detect_seasonality_changes_auto, AmplitudeModulationResult, ModulationType, SeasonalType,
SeasonalityClassification, ThresholdMethod, WaveletAmplitudeResult,
};
pub use hilbert::{hilbert_transform, instantaneous_period};
pub use lomb_scargle::{lomb_scargle, lomb_scargle_fdata, LombScargleResult};
pub use matrix_profile::{
matrix_profile, matrix_profile_fdata, matrix_profile_seasonality, MatrixProfileResult,
};
pub use peak::detect_peaks;
pub use period::{
detect_multiple_periods, estimate_period_acf, estimate_period_fft, estimate_period_regression,
};
pub use sazed::{sazed, sazed_fdata, SazedComponents, SazedResult};
pub use ssa::{ssa, ssa_fdata, ssa_seasonality, SsaResult};
pub use strength::{
seasonal_strength_spectral, seasonal_strength_variance, seasonal_strength_wavelet,
seasonal_strength_windowed,
};
pub use self::types::*;
mod types {
    //! Plain-data result types shared by the seasonality submodules and
    //! re-exported at the parent module root via `pub use self::types::*`.

    /// A single candidate period together with its spectral support.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct PeriodEstimate {
        /// Estimated period, in the units of the argument grid.
        pub period: f64,
        /// Frequency associated with the estimate.
        pub frequency: f64,
        /// Spectral power backing the estimate.
        pub power: f64,
        /// Confidence score for the estimate.
        pub confidence: f64,
    }
    /// A detected local maximum in a curve.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct Peak {
        /// Location of the peak on the argument grid.
        pub time: f64,
        /// Curve value at the peak.
        pub value: f64,
        /// Topographic prominence of the peak (see `compute_prominence`).
        pub prominence: f64,
    }
    /// Peaks found per curve plus the spacing statistics derived from them.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct PeakDetectionResult {
        /// Peaks per curve (outer index: curve).
        pub peaks: Vec<Vec<Peak>>,
        /// Distances between consecutive peaks, per curve.
        pub inter_peak_distances: Vec<Vec<f64>>,
        /// Mean inter-peak distance, interpreted as a period estimate.
        pub mean_period: f64,
    }
    /// One period found by iterative sinusoid extraction.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct DetectedPeriod {
        /// Detected period length.
        pub period: f64,
        /// Confidence score for the detection.
        pub confidence: f64,
        /// Seasonal strength associated with the period.
        pub strength: f64,
        /// Amplitude of the fitted sinusoidal component.
        pub amplitude: f64,
        /// Phase of the fitted sinusoidal component.
        pub phase: f64,
        // NOTE(review): presumably the extraction round (e.g. in
        // detect_multiple_periods) at which this period was removed — confirm.
        pub iteration: usize,
    }
    /// How seasonal strength is computed.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[non_exhaustive]
    pub enum StrengthMethod {
        /// Variance-decomposition based strength.
        Variance,
        /// Spectral-power based strength.
        Spectral,
    }
    /// A point in time where seasonality appears or disappears.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct ChangePoint {
        /// Location of the change on the argument grid.
        pub time: f64,
        /// Whether seasonality starts or stops here.
        pub change_type: ChangeType,
        /// Seasonal strength just before the change.
        pub strength_before: f64,
        /// Seasonal strength just after the change.
        pub strength_after: f64,
    }
    /// Direction of a seasonality change.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[non_exhaustive]
    pub enum ChangeType {
        /// Seasonality becomes active.
        Onset,
        /// Seasonality ceases.
        Cessation,
    }
    /// All change points plus the strength curve they were derived from.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct ChangeDetectionResult {
        /// Detected onsets/cessations in time order.
        pub change_points: Vec<ChangePoint>,
        /// Windowed seasonal-strength values (may contain NaN).
        pub strength_curve: Vec<f64>,
    }
    /// Pointwise (Hilbert-style) period/frequency/amplitude estimates.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct InstantaneousPeriod {
        /// Instantaneous period per sample.
        pub period: Vec<f64>,
        /// Instantaneous frequency per sample.
        pub frequency: Vec<f64>,
        /// Instantaneous amplitude (envelope) per sample.
        pub amplitude: Vec<f64>,
    }
    /// Statistics describing how peak timing drifts across cycles.
    #[derive(Debug, Clone, PartialEq)]
    #[non_exhaustive]
    pub struct PeakTimingResult {
        /// Absolute time of each detected peak.
        pub peak_times: Vec<f64>,
        /// Value at each detected peak.
        pub peak_values: Vec<f64>,
        /// Peak timing normalized within its cycle.
        pub normalized_timing: Vec<f64>,
        /// Mean of the normalized timings.
        pub mean_timing: f64,
        /// Standard deviation of the normalized timings.
        pub std_timing: f64,
        /// Range (max - min) of the normalized timings.
        pub range_timing: f64,
        /// Aggregate timing-variability score.
        pub variability_score: f64,
        /// Linear trend of timing across cycles.
        pub timing_trend: f64,
        /// Cycle index each peak belongs to.
        pub cycle_indices: Vec<usize>,
    }
}
#[inline]
pub(super) fn compute_mean_curve_impl(data: &FdMatrix, parallel: bool) -> Vec<f64> {
    // Pointwise mean across the n curves (rows) at each of the m grid points
    // (columns). The per-column computation is a single shared closure so the
    // serial and parallel branches cannot drift apart (they were previously
    // duplicated verbatim).
    let (n, m) = data.shape();
    let column_mean = |j: usize| {
        let mut sum = 0.0;
        for i in 0..n {
            sum += data[(i, j)];
        }
        sum / n as f64
    };
    // Parallelism only pays off for a reasonably wide grid.
    if parallel && m >= 100 {
        iter_maybe_parallel!(0..m).map(column_mean).collect()
    } else {
        (0..m).map(column_mean).collect()
    }
}
#[inline]
pub(super) fn compute_mean_curve(data: &FdMatrix) -> Vec<f64> {
    // Convenience wrapper: mean curve with parallelism requested (the impl
    // still runs serially for narrow grids; see `compute_mean_curve_impl`).
    compute_mean_curve_impl(data, true)
}
#[inline]
pub(super) fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    // Index range [start, end) that skips roughly 10% of the grid at each
    // edge, but never shrinks past the middle half of the domain. Returns
    // None when the resulting range would be empty.
    let skip = (0.1 * m as f64) as usize;
    let start = usize::min(skip, m / 4);
    let end = usize::max(m.saturating_sub(skip), m * 3 / 4);
    (end > start).then_some((start, end))
}
pub(super) fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
    // Like `interior_bounds`, but additionally requires the interior range
    // to be wider than `min_span` points.
    match interior_bounds(m) {
        Some((start, end)) if end > start + min_span => Some((start, end)),
        _ => None,
    }
}
pub(super) fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
    // One-sided periodogram of a uniformly sampled series.
    //
    // Returns (frequencies, power). The sampling rate is inferred from the
    // average spacing of `argvals`; interior bins are doubled so the
    // one-sided spectrum accounts for the mirrored negative frequencies.
    // Returns empty vectors for too-short input or mismatched grid lengths.
    let n = data.len();
    if n < 2 || argvals.len() != n {
        return (Vec::new(), Vec::new());
    }
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let fs = 1.0 / dt;
    let fft = FftPlanner::<f64>::new().plan_fft_forward(n);
    let mut spectrum: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft.process(&mut spectrum);
    let n_freq = n / 2 + 1;
    let norm = n as f64 * n as f64;
    let mut frequencies = Vec::with_capacity(n_freq);
    let mut power = Vec::with_capacity(n_freq);
    for (k, c) in spectrum.iter().take(n_freq).enumerate() {
        frequencies.push(k as f64 * fs / n as f64);
        // DC (k == 0) and the Nyquist bin (k == n/2) have no mirror image,
        // so only the interior bins are doubled.
        let p = c.norm_sqr() / norm;
        power.push(if k > 0 && k < n / 2 { 2.0 * p } else { p });
    }
    (frequencies, power)
}
pub(super) fn autocorrelation_naive(data: &[f64], max_lag: usize, mean: f64, var: f64) -> Vec<f64> {
    // Direct O(n * max_lag) autocorrelation using a precomputed mean and
    // variance. Uses the biased estimator (division by n at every lag),
    // matching the FFT path in `autocorrelation`.
    //
    // Returns an empty vector for empty input; previously `max_lag.min(n - 1)`
    // underflowed when `data` was empty.
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }
    let max_lag = max_lag.min(n - 1);
    let denom = n as f64 * var;
    (0..=max_lag)
        .map(|lag| {
            let sum: f64 = data[..n - lag]
                .iter()
                .zip(&data[lag..])
                .map(|(&a, &b)| (a - mean) * (b - mean))
                .sum();
            sum / denom
        })
        .collect()
}
pub(super) fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    // Biased autocorrelation up to `max_lag`, choosing between a direct
    // O(n^2) computation (short series) and an FFT-based O(n log n) one
    // (Wiener-Khinchin theorem).
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }
    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;
    if var < 1e-15 {
        // (Near-)constant series: define the ACF as identically 1.
        // NOTE(review): this arm clamps with `min(n)` while the paths below
        // use `min(n - 1)`, so the returned length can differ by one when
        // max_lag >= n — confirm callers don't rely on the exact length.
        return vec![1.0; max_lag.min(n) + 1];
    }
    let max_lag = max_lag.min(n - 1);
    if n <= 64 {
        // FFT setup overhead dominates for short series; use the direct sum.
        return autocorrelation_naive(data, max_lag, mean, var);
    }
    // Zero-pad to at least 2n (next power of two) so the circular
    // convolution performed via the FFT equals the linear autocorrelation.
    let fft_len = (2 * n).next_power_of_two();
    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(fft_len);
    let fft_inverse = planner.plan_fft_inverse(fft_len);
    let mut buffer: Vec<Complex<f64>> = Vec::with_capacity(fft_len);
    for &x in data {
        buffer.push(Complex::new(x - mean, 0.0));
    }
    buffer.resize(fft_len, Complex::new(0.0, 0.0));
    fft_forward.process(&mut buffer);
    // Power spectrum |X(f)|^2; its inverse transform is the autocovariance.
    for c in &mut buffer {
        *c = Complex::new(c.norm_sqr(), 0.0);
    }
    fft_inverse.process(&mut buffer);
    // fft_len undoes rustfft's unnormalized inverse transform; n * var
    // rescales the autocovariance to correlation (so acf[0] == 1).
    let norm = fft_len as f64 * n as f64 * var;
    (0..=max_lag).map(|lag| buffer[lag].re / norm).collect()
}
pub(super) fn try_add_peak(
    peaks: &mut Vec<usize>,
    candidate: usize,
    signal: &[f64],
    min_distance: usize,
) {
    // Append `candidate` to `peaks`, enforcing a minimum index distance to
    // the previously accepted peak. When the candidate is too close, keep
    // whichever of the two has the larger signal value.
    //
    // Precondition: candidates arrive in increasing index order (the caller
    // scans left to right), so `candidate > last` holds below.
    let Some(&last) = peaks.last() else {
        peaks.push(candidate);
        return;
    };
    if candidate - last >= min_distance {
        peaks.push(candidate);
    } else if signal[candidate] > signal[last] {
        // Replace the previous peak in place. The old code wrote through
        // `last_mut().unwrap_or(&mut 0)`, which would silently discard the
        // update in the (impossible) empty case; make the invariant explicit.
        *peaks.last_mut().expect("peaks is non-empty: last() was Some") = candidate;
    }
}
pub(crate) fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
    // Indices of strict local maxima, thinned so that accepted peaks lie at
    // least `min_distance` apart (close ties resolved toward larger values
    // by `try_add_peak`).
    let mut peaks = Vec::new();
    if signal.len() < 3 {
        return peaks;
    }
    for (offset, window) in signal.windows(3).enumerate() {
        if window[1] > window[0] && window[1] > window[2] {
            try_add_peak(&mut peaks, offset + 1, signal, min_distance);
        }
    }
    peaks
}
pub(crate) fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    // Topographic prominence of the peak at `peak_idx`: its height above the
    // higher of the two lowest points reached before the signal climbs back
    // to (or above) the peak's level on each side.
    let peak_val = signal[peak_idx];
    let left_min = signal[..peak_idx]
        .iter()
        .rev()
        .copied()
        .take_while(|&v| v < peak_val)
        .fold(peak_val, f64::min);
    let right_min = signal[peak_idx + 1..]
        .iter()
        .copied()
        .take_while(|&v| v < peak_val)
        .fold(peak_val, f64::min);
    peak_val - left_min.max(right_min)
}
pub(super) fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    // Remove 2π discontinuities from a wrapped phase sequence: whenever two
    // consecutive raw samples jump by more than π, accumulate an opposing
    // correction of 2π that is applied to every subsequent sample.
    let Some((&first, rest)) = phase.split_first() else {
        return Vec::new();
    };
    let mut out = Vec::with_capacity(phase.len());
    out.push(first);
    let mut correction = 0.0;
    let mut prev = first;
    for &p in rest {
        // Jumps are measured between RAW (uncorrected) samples.
        let step = p - prev;
        if step > PI {
            correction -= 2.0 * PI;
        } else if step < -PI {
            correction += 2.0 * PI;
        }
        out.push(p + correction);
        prev = p;
    }
    out
}
pub(super) fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
    // Morlet wavelet: a complex exponential at angular frequency `omega0`
    // under a unit-width Gaussian envelope, exp(i*omega0*t) * exp(-t^2 / 2).
    let envelope = (-0.5 * t * t).exp();
    Complex::new((omega0 * t).cos(), (omega0 * t).sin()) * envelope
}
#[allow(dead_code)]
pub(super) fn cwt_morlet(
    signal: &[f64],
    argvals: &[f64],
    scale: f64,
    omega0: f64,
) -> Vec<Complex<f64>> {
    // Direct O(n^2) continuous wavelet transform at a single `scale` with a
    // Morlet mother wavelet; kept as a reference next to `cwt_morlet_fft`.
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    // L2 normalization keeps coefficients comparable across scales.
    let norm = 1.0 / scale.sqrt();
    let mut coefficients = Vec::with_capacity(n);
    for b in 0..n {
        let mut acc = Complex::new(0.0, 0.0);
        for (k, &x) in signal.iter().enumerate() {
            let u = (argvals[k] - argvals[b]) / scale;
            // The Gaussian envelope is negligible past |u| ~ 6; skip the tail.
            if u.abs() < 6.0 {
                acc += x * morlet_wavelet(u, omega0).conj();
            }
        }
        coefficients.push(acc * norm * dt);
    }
    coefficients
}
pub(super) fn cwt_morlet_fft(
    signal: &[f64],
    argvals: &[f64],
    scale: f64,
    omega0: f64,
) -> Vec<Complex<f64>> {
    // FFT-accelerated continuous wavelet transform at a single `scale`,
    // computed as a circular convolution of the signal with the conjugated
    // Morlet wavelet. Assumes `argvals` is an (approximately) uniform grid.
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    // L2 normalization 1/sqrt(scale) keeps energy comparable across scales.
    let norm = 1.0 / scale.sqrt();
    // Sample the wavelet in wrapped ("fftshift") order: indices <= n/2 map to
    // times >= 0 and the upper half wraps to negative times, so the circular
    // convolution below is centered on each sample.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();
    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);
    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);
    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);
    // Correlation with the wavelet = pointwise product with the conjugated
    // wavelet spectrum.
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();
    fft_inverse.process(&mut result);
    // dt approximates the CWT integral; 1/n undoes rustfft's unnormalized
    // inverse transform.
    for c in &mut result {
        *c *= dt / n as f64;
    }
    result
}
pub(super) fn otsu_threshold(values: &[f64]) -> f64 {
    // Otsu's method over a 256-bin histogram of the finite values. Falls back
    // to 0.5 when no finite values exist, and to the range midpoint when the
    // values are numerically constant.
    let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
    if valid.is_empty() {
        return 0.5;
    }
    let (vmin, vmax) = valid
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        });
    if (vmax - vmin).abs() < 1e-10 {
        return (vmin + vmax) / 2.0;
    }
    let n_bins = 256;
    let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
    let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
    // Report the center of the winning bin as the threshold.
    hist_min + (best_bin as f64 + 0.5) * bin_width
}
pub(super) fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    // Ordinary least-squares slope of y on x. Returns 0.0 for degenerate
    // input: mismatched lengths, fewer than two points, or zero x-variance.
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }
    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;
    let (num, den) = x.iter().zip(y).fold((0.0, 0.0), |(num, den), (&xi, &yi)| {
        let dx = xi - mean_x;
        (num + dx * (yi - mean_y), den + dx * dx)
    });
    if den.abs() < 1e-15 {
        0.0
    } else {
        num / den
    }
}
/// Summary statistics of an amplitude envelope over the interior of the
/// domain, as produced by `analyze_amplitude_envelope`.
pub(super) struct AmplitudeEnvelopeStats {
    /// Coefficient of variation (std / mean) of the envelope.
    pub(super) modulation_score: f64,
    /// Relative amplitude change over the time span, clamped to [-1, 1].
    pub(super) amplitude_trend: f64,
    /// Whether `modulation_score` exceeded the caller-supplied threshold.
    pub(super) has_modulation: bool,
    /// Classification: stable / emerging / fading / oscillating.
    pub(super) modulation_type: change::ModulationType,
    /// Mean envelope amplitude (underscore: not read by current consumers).
    pub(super) _mean_amp: f64,
    /// Smallest envelope value.
    pub(super) min_amp: f64,
    /// Largest envelope value.
    pub(super) max_amp: f64,
}
pub(super) fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    // Characterize the amplitude envelope: how strongly it varies relative to
    // its mean (modulation) and whether it trends upward (emerging), downward
    // (fading) or neither (oscillating) over the observed time span.
    let count = interior_envelope.len() as f64;
    let mean_amp = interior_envelope.iter().sum::<f64>() / count;
    let min_amp = interior_envelope
        .iter()
        .copied()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .copied()
        .fold(f64::NEG_INFINITY, f64::max);
    let std_amp = (interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / count)
        .sqrt();
    // Coefficient of variation; defined as 0 for a near-silent envelope.
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };
    // Least-squares slope of amplitude against time.
    let t_mean = interior_times.iter().sum::<f64>() / count;
    let (cov_ta, var_t) =
        interior_times
            .iter()
            .zip(interior_envelope)
            .fold((0.0, 0.0), |(cov, var), (&t, &a)| {
                (cov + (t - t_mean) * (a - mean_amp), var + (t - t_mean).powi(2))
            });
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    // Normalize total drift by the mean amplitude and clamp to [-1, 1].
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = match () {
        _ if !has_modulation => change::ModulationType::Stable,
        _ if amplitude_trend > 0.3 => change::ModulationType::Emerging,
        _ if amplitude_trend < -0.3 => change::ModulationType::Fading,
        _ => change::ModulationType::Oscillating,
    };
    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
pub(super) fn fit_and_subtract_sinusoid(
    residual: &mut [f64],
    argvals: &[f64],
    period: f64,
) -> (f64, f64, f64, f64) {
    // Project `residual` onto cos/sin at the given `period` (a least-squares
    // fit assuming a roughly uniform grid spanning whole cycles), remove the
    // fitted component in place, and return (a, b, amplitude, phase) where
    // the fitted component is a*cos(w t) + b*sin(w t).
    let m = residual.len();
    let omega = 2.0 * PI / period;
    let basis = |t: f64| ((omega * t).cos(), (omega * t).sin());
    let (mut cos_sum, mut sin_sum) = (0.0, 0.0);
    for (j, &t) in argvals.iter().enumerate() {
        let (c, s) = basis(t);
        cos_sum += residual[j] * c;
        sin_sum += residual[j] * s;
    }
    // Fourier-coefficient normalization: 2/m.
    let a = 2.0 * cos_sum / m as f64;
    let b = 2.0 * sin_sum / m as f64;
    let amplitude = (a * a + b * b).sqrt();
    let phase = b.atan2(a);
    for (j, &t) in argvals.iter().enumerate() {
        let (c, s) = basis(t);
        residual[j] -= a * c + b * s;
    }
    (a, b, amplitude, phase)
}
pub(super) fn validate_sazed_component(
    period: f64,
    confidence: f64,
    min_period: f64,
    max_period: f64,
    threshold: f64,
) -> Option<f64> {
    // Accept a SAZED component only when its period is finite, strictly
    // inside (min_period, max_period), and its confidence clears `threshold`.
    let in_range = period.is_finite() && period > min_period && period < max_period;
    (in_range && confidence > threshold).then_some(period)
}
pub(super) fn count_agreeing_periods(
    periods: &[f64],
    reference: f64,
    tolerance: f64,
) -> (usize, f64) {
    // Count the periods whose relative difference from `reference` is within
    // `tolerance`, returning their sum alongside the count (callers average
    // them). The difference is normalized by the larger of the two values so
    // the measure is symmetric.
    periods
        .iter()
        .filter(|&&p| (reference - p).abs() / reference.max(p) <= tolerance)
        .fold((0, 0.0), |(count, sum), &p| (count + 1, sum + p))
}
pub(super) fn find_acf_descent_end(acf: &[f64]) -> usize {
    // Walk the ACF from lag 1 until its initial descent ends: either the ACF
    // turns negative (return that lag) or it turns back upward (return the
    // lag just before the upturn). Defaults to 1 when neither happens.
    for (i, &value) in acf.iter().enumerate().skip(1) {
        if value < 0.0 {
            return i;
        }
        if i > 1 && value > acf[i - 1] {
            return i - 1;
        }
    }
    1
}
pub(super) fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
    // First local maximum of the ACF after its initial descent. The returned
    // value is clamped at zero so it can serve as a confidence weight.
    if acf.len() < 4 {
        return None;
    }
    let search_start = find_acf_descent_end(acf);
    find_peaks_1d(&acf[search_start..], 1).first().map(|&relative| {
        let lag = relative + search_start;
        (lag, acf[lag].max(0.0))
    })
}
pub(super) fn compute_cycle_strengths(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    // Split the domain into consecutive whole cycles of length `period`,
    // compute the variance-based seasonal strength of each cycle, and collect
    // indices of cycles whose strength falls below `strength_thresh`.
    //
    // Cycles covering fewer than 4 grid points get a NaN strength and are
    // never reported as weak. Assumes `argvals` is sorted ascending.
    let (n, m) = data.shape();
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    let n_cycles = ((t_end - t_start) / period).floor() as usize;
    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();
    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;
        // First grid index inside the cycle, and one past the last.
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);
        let cycle_m = end_idx - start_idx;
        if cycle_m < 4 {
            // Too few points to estimate a strength for this cycle.
            cycle_strengths.push(f64::NAN);
            continue;
        }
        // Copy the cycle's columns (all n curves) into a fresh column-major
        // buffer for the sub-matrix.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[(i, j)]))
            .collect();
        let Ok(cycle_mat) = FdMatrix::from_column_major(cycle_data, n, cycle_m) else {
            cycle_strengths.push(f64::NAN);
            continue;
        };
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();
        let strength_val =
            strength::seasonal_strength_variance(&cycle_mat, &cycle_argvals, period, 3);
        cycle_strengths.push(strength_val);
        if strength_val < strength_thresh {
            weak_seasons.push(cycle);
        }
    }
    (cycle_strengths, weak_seasons)
}
pub(super) fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
    // Equal-width histogram over the data range; returns (counts, minimum,
    // bin width). Values at the maximum land in the last bin via the clamp.
    // Assumes `valid` is non-empty with a non-degenerate range (callers such
    // as `otsu_threshold` check this first).
    let (min_val, max_val) = valid
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        });
    let bin_width = (max_val - min_val) / n_bins as f64;
    let last_bin = n_bins as f64 - 1.0;
    let mut histogram = vec![0usize; n_bins];
    for &v in valid {
        let bin = ((v - min_val) / bin_width).min(last_bin) as usize;
        histogram[bin] += 1;
    }
    (histogram, min_val, bin_width)
}
pub(super) fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
    // Otsu scan: pick the bin that maximizes the between-class variance
    // weight_b * weight_f * (mean_b - mean_f)^2 when thresholding after it.
    // Returns (best bin index, best variance); (0, 0.0) for empty input.
    let weighted_total: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &count)| i as f64 * count as f64)
        .sum();
    let mut best = (0usize, 0.0f64);
    let (mut weight_b, mut sum_b) = (0.0, 0.0);
    for (t, &count) in histogram.iter().enumerate() {
        weight_b += count as f64;
        if weight_b == 0.0 {
            continue;
        }
        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            // All mass is in the background class from here on.
            break;
        }
        sum_b += t as f64 * count as f64;
        let mean_b = sum_b / weight_b;
        let mean_f = (weighted_total - sum_b) / weight_f;
        let between_class = weight_b * weight_f * (mean_b - mean_f).powi(2);
        if between_class > best.1 {
            best = (t, between_class);
        }
    }
    best
}
pub(super) fn sum_harmonic_power(
    frequencies: &[f64],
    power: &[f64],
    fundamental_freq: f64,
    tolerance: f64,
) -> (f64, f64) {
    // Sum the spectral power lying near integer multiples of
    // `fundamental_freq` (the "seasonal" power) and the total power,
    // excluding the DC bin from both sums.
    let mut seasonal_power = 0.0;
    let mut total_power = 0.0;
    for (&freq, &p) in frequencies.iter().zip(power).skip(1) {
        total_power += p;
        let harmonic_ratio = freq / fundamental_freq;
        let nearest = harmonic_ratio.round();
        if nearest >= 1.0 && (harmonic_ratio - nearest).abs() < tolerance {
            seasonal_power += p;
        }
    }
    (seasonal_power, total_power)
}
pub(super) fn crossing_direction(
    ss: f64,
    threshold: f64,
    in_seasonal: bool,
    i: usize,
    last_change_idx: Option<usize>,
    min_dur_points: usize,
) -> Option<bool> {
    // Decide whether the strength value `ss` at index `i` constitutes a
    // threshold crossing. Returns the new seasonal state, or None when the
    // value is NaN, the state did not change, or the previous change happened
    // fewer than `min_dur_points` samples ago (debouncing).
    if ss.is_nan() {
        return None;
    }
    let now_seasonal = ss > threshold;
    let changed = now_seasonal != in_seasonal;
    let debounced = match last_change_idx {
        Some(last) => i - last >= min_dur_points,
        None => true,
    };
    (changed && debounced).then_some(now_seasonal)
}
pub(super) fn build_change_point(
    i: usize,
    ss: f64,
    now_seasonal: bool,
    strength_curve: &[f64],
    argvals: &[f64],
) -> ChangePoint {
    // Materialize a change point at sample `i`. The "before" strength is the
    // previous sample when it exists and is not NaN; otherwise the current
    // value is reused.
    let previous = if i > 0 { strength_curve[i - 1] } else { f64::NAN };
    ChangePoint {
        time: argvals[i],
        change_type: if now_seasonal {
            ChangeType::Onset
        } else {
            ChangeType::Cessation
        },
        strength_before: if previous.is_nan() { ss } else { previous },
        strength_after: ss,
    }
}
pub(super) fn detect_threshold_crossings(
    strength_curve: &[f64],
    argvals: &[f64],
    threshold: f64,
    min_dur_points: usize,
) -> Vec<ChangePoint> {
    // Scan the strength curve, emitting a ChangePoint for every debounced
    // threshold crossing. The initial seasonal state comes from the first
    // sample; the curve must be non-empty.
    let mut change_points = Vec::new();
    let mut in_seasonal = strength_curve[0] > threshold;
    let mut last_change_idx = None;
    for i in 1..strength_curve.len() {
        let ss = strength_curve[i];
        let decision =
            crossing_direction(ss, threshold, in_seasonal, i, last_change_idx, min_dur_points);
        if let Some(now_seasonal) = decision {
            change_points.push(build_change_point(i, ss, now_seasonal, strength_curve, argvals));
            in_seasonal = now_seasonal;
            last_change_idx = Some(i);
        }
    }
    change_points
}
pub(super) fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    // Local maxima of the power spectrum rising above twice the median power
    // (a crude noise floor), returned sorted by descending power.
    if power.len() < 3 {
        return Vec::new();
    }
    let noise_floor = {
        let mut sorted = power.to_vec();
        crate::helpers::sort_nan_safe(&mut sorted);
        sorted[sorted.len() / 2]
    };
    let threshold = noise_floor * 2.0;
    let mut peaks: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();
    peaks.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    peaks.into_iter().map(|(idx, _)| idx).collect()
}
pub(super) fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
    // Pick the period supported by the largest cluster of mutually agreeing
    // estimates (within `tolerance` relative difference) and return the
    // cluster mean together with its size. Ties favor the larger mean.
    match periods {
        [] => return (f64::NAN, 0),
        [only] => return (*only, 1),
        _ => {}
    }
    let mut best_period = periods[0];
    let mut best_count = 0usize;
    let mut best_sum = 0.0f64;
    for &candidate in periods {
        let (count, sum) = count_agreeing_periods(periods, candidate, tolerance);
        let better = count > best_count
            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64);
        if better {
            best_count = count;
            best_period = sum / count as f64;
            best_sum = sum;
        }
    }
    (best_period, best_count)
}
pub(super) fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    // Score in [0, 1] for how well `period` is supported by the ACF: start
    // from the (non-negative) ACF at the matching lag, penalize a strong
    // half-period correlation (the true period may be half as long), and
    // reward a confirming double-period correlation.
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }
    let acf_at_lag = acf[lag];
    let mut score = acf_at_lag.max(0.0);
    let half_lag = lag / 2;
    if (1..acf.len()).contains(&half_lag) && acf[half_lag] > acf_at_lag * 0.7 {
        score *= 0.5;
    }
    if let Some(&double_acf) = acf.get(lag * 2) {
        if double_acf > 0.3 {
            score *= 1.2;
        }
    }
    score.min(1.0)
}
pub(super) fn refine_period_gradient(
    acf: &[f64],
    initial_period: f64,
    dt: f64,
    steps: usize,
) -> f64 {
    // Hill-climb the ACF validation score around `initial_period` in
    // half-sample steps, for at most `steps` iterations. The result is
    // clamped below at one sample spacing.
    let step = dt * 0.5;
    let mut period = initial_period;
    for _ in 0..steps {
        let here = validate_period_acf(acf, period, dt);
        let left = validate_period_acf(acf, period - step, dt);
        let right = validate_period_acf(acf, period + step, dt);
        if left > here && left > right {
            period -= step;
        } else if right > here {
            period += step;
        }
    }
    period.max(dt)
}
pub(super) fn cluster_periods(
    candidates: &[(f64, f64)],
    tolerance: f64,
    min_size: usize,
) -> Vec<(f64, f64)> {
    // Group (period, power) candidates whose periods agree within `tolerance`
    // (relative difference against the running cluster mean) and collapse
    // each sufficiently large group into its power-weighted mean period and
    // total power. Clusters smaller than `min_size` are dropped.
    //
    // Finalization used to be duplicated for the mid-scan and trailing
    // clusters; it is now a single helper so the two paths cannot diverge.
    fn flush(cluster: &[(f64, f64)], min_size: usize, out: &mut Vec<(f64, f64)>) {
        if cluster.len() >= min_size {
            let weighted: f64 = cluster.iter().map(|(p, pw)| p * pw).sum();
            let total_power: f64 = cluster.iter().map(|(_, pw)| pw).sum();
            // Guard against an all-zero-power cluster.
            out.push((weighted / total_power.max(1e-15), total_power));
        }
    }
    if candidates.is_empty() {
        return Vec::new();
    }
    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));
    let mut clusters = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];
    for &(period, power) in &sorted[1..] {
        // Unweighted mean of the current cluster's periods.
        let center = current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - center).abs() / center.max(period);
        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            flush(&current, min_size, &mut clusters);
            current = vec![(period, power)];
        }
    }
    flush(&current, min_size, &mut clusters);
    clusters
}