use crate::error::{Result, TimeSeriesError};
/// How the seasonal component combines with level and trend in Holt-Winters.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Seasonal effect is added: forecast = level + trend + seasonal.
    Additive,
    /// Seasonal effect scales: forecast = (level + trend) * seasonal.
    /// Requires strictly positive data (enforced in `HoltWinters::fit`).
    Multiplicative,
}
/// Simple (single) exponential smoothing model: forecasts are flat at the
/// smoothed level reached at the end of the training sample.
#[derive(Debug, Clone)]
pub struct SimpleExponentialSmoothing {
    // Level smoothing parameter in (0, 1]; either user-supplied or optimized.
    alpha: f64,
    // Smoothed level after running the recursion over the whole training
    // series (despite the name, this is the END-of-sample level — it is the
    // value every forecast step returns).
    initial_level: f64,
}
impl SimpleExponentialSmoothing {
    /// Fits an SES model to `data`.
    ///
    /// When `alpha` is `None` it is chosen by golden-section search over the
    /// in-sample one-step SSE; a supplied `alpha` must lie in (0, 1].
    ///
    /// # Errors
    /// `InsufficientData` for fewer than 2 observations; `InvalidParameter`
    /// for an out-of-range `alpha`.
    pub fn fit(data: &[f64], alpha: Option<f64>) -> Result<Self> {
        if data.len() < 2 {
            return Err(TimeSeriesError::InsufficientData {
                message: "SES requires at least 2 observations".to_string(),
                required: 2,
                actual: data.len(),
            });
        }
        let chosen = if let Some(a) = alpha {
            // NaN fails this check too, so it is rejected like any other
            // out-of-range value.
            if !(0.0 < a && a <= 1.0) {
                return Err(TimeSeriesError::InvalidParameter {
                    name: "alpha".to_string(),
                    message: "alpha must be in (0, 1]".to_string(),
                });
            }
            a
        } else {
            optimize_alpha_ses(data)?
        };
        Ok(Self {
            alpha: chosen,
            initial_level: compute_ses_level(data, chosen, data[0]),
        })
    }

    /// h-step forecast: SES forecasts are flat at the end-of-sample level.
    pub fn forecast(&self, h: usize) -> Vec<f64> {
        (0..h).map(|_| self.initial_level).collect()
    }

    /// One-step-ahead in-sample fitted values, same length as `data`.
    /// `fitted[t]` is the smoothed level built from observations `0..t`
    /// (the first entry has no history and is simply `data[0]`).
    pub fn fitted_values(&self, data: &[f64]) -> Vec<f64> {
        if data.is_empty() {
            return Vec::new();
        }
        let mut smoothed = data[0];
        let mut out = Vec::with_capacity(data.len());
        out.push(smoothed);
        for &obs in &data[..data.len() - 1] {
            smoothed = self.alpha * obs + (1.0 - self.alpha) * smoothed;
            out.push(smoothed);
        }
        out
    }

    /// The (possibly optimized) smoothing parameter.
    pub fn alpha(&self) -> f64 {
        self.alpha
    }

    /// The end-of-sample smoothed level (the flat forecast value).
    pub fn level(&self) -> f64 {
        self.initial_level
    }
}
/// Runs the SES level recursion `l = alpha*y + (1-alpha)*l` over the whole
/// series, starting from `l0`, and returns the final level.
fn compute_ses_level(data: &[f64], alpha: f64, l0: f64) -> f64 {
    data.iter()
        .fold(l0, |level, &y| alpha * y + (1.0 - alpha) * level)
}
/// Sum of squared one-step-ahead errors of SES with parameter `alpha`,
/// initialized at `data[0]` (so the first error is scored at `data[1]`).
///
/// Fewer than 2 observations cannot be scored: previously this panicked on
/// an empty slice and returned a meaningless 0.0 for a singleton; it now
/// returns `f64::INFINITY`, matching `holt_sse`, so optimizers can never
/// prefer a degenerate input.
fn ses_sse(data: &[f64], alpha: f64) -> f64 {
    if data.len() < 2 {
        return f64::INFINITY;
    }
    let mut level = data[0];
    let mut sse = 0.0;
    for &y in data.iter().skip(1) {
        let err = y - level;
        sse += err * err;
        level = alpha * y + (1.0 - alpha) * level;
    }
    sse
}
/// Chooses the SES smoothing parameter by golden-section search on the
/// in-sample SSE over (0, 1), with the bracket clipped just inside the
/// open/closed endpoints.
///
/// # Errors
/// `OptimizationError` if the midpoint of the final bracket falls outside
/// (0, 1] (defensive; the bracket itself never leaves that range).
fn optimize_alpha_ses(data: &[f64]) -> Result<f64> {
    let ratio = (5.0_f64.sqrt() - 1.0) / 2.0;
    let mut lo = 1e-4_f64;
    let mut hi = 1.0_f64 - 1e-10;
    let tol = 1e-7;
    // Two interior probe points at golden-ratio positions.
    let mut left = hi - ratio * (hi - lo);
    let mut right = lo + ratio * (hi - lo);
    let mut f_left = ses_sse(data, left);
    let mut f_right = ses_sse(data, right);
    let mut iterations = 0;
    while iterations < 200 && (hi - lo).abs() >= tol {
        if f_left < f_right {
            // Minimum lies in [lo, right]; reuse `left` as the new right probe.
            hi = right;
            right = left;
            f_right = f_left;
            left = hi - ratio * (hi - lo);
            f_left = ses_sse(data, left);
        } else {
            // Minimum lies in [left, hi]; reuse `right` as the new left probe.
            lo = left;
            left = right;
            f_left = f_right;
            right = lo + ratio * (hi - lo);
            f_right = ses_sse(data, right);
        }
        iterations += 1;
    }
    let best = (lo + hi) / 2.0;
    if best <= 0.0 || best > 1.0 {
        return Err(TimeSeriesError::OptimizationError(
            "SES alpha optimization produced out-of-range value".to_string(),
        ));
    }
    Ok(best)
}
/// Holt's linear-trend (double exponential smoothing) model.
#[derive(Debug, Clone)]
pub struct HoltLinear {
    // Level smoothing parameter in (0, 1].
    alpha: f64,
    // Trend smoothing parameter in (0, 1].
    beta: f64,
    // End-of-sample level from `holt_final_state`.
    level: f64,
    // End-of-sample trend (per-step slope) from `holt_final_state`.
    trend: f64,
}
impl HoltLinear {
    /// Fits Holt's linear trend method to `data`.
    ///
    /// `alpha` (level) and `beta` (trend) may each be fixed by the caller;
    /// any parameter left as `None` is optimized by minimizing the in-sample
    /// one-step-ahead SSE.
    ///
    /// # Errors
    /// `InsufficientData` for fewer than 4 observations; `InvalidParameter`
    /// when a supplied parameter lies outside (0, 1].
    pub fn fit(
        data: &[f64],
        alpha: Option<f64>,
        beta: Option<f64>,
    ) -> Result<Self> {
        if data.len() < 4 {
            return Err(TimeSeriesError::InsufficientData {
                message: "Holt linear requires at least 4 observations".to_string(),
                required: 4,
                actual: data.len(),
            });
        }
        validate_smoothing_param("alpha", alpha)?;
        validate_smoothing_param("beta", beta)?;
        let (opt_alpha, opt_beta) = match (alpha, beta) {
            (Some(a), Some(b)) => (a, b),
            _ => optimize_holt_params(data, alpha, beta)?,
        };
        let (l, b) = holt_final_state(data, opt_alpha, opt_beta);
        Ok(Self {
            alpha: opt_alpha,
            beta: opt_beta,
            level: l,
            trend: b,
        })
    }

    /// h-step-ahead forecasts: `level + k * trend` for k = 1..=h.
    pub fn forecast(&self, h: usize) -> Vec<f64> {
        (1..=h)
            .map(|k| self.level + (k as f64) * self.trend)
            .collect()
    }

    /// Damped-trend forecast with damping factor `phi` in (0, 1).
    /// Out-of-range `phi` falls back to the undamped forecast (phi -> 1 is
    /// the undamped limit).
    pub fn damped_forecast(&self, h: usize, phi: f64) -> Vec<f64> {
        if !(0.0 < phi && phi < 1.0) {
            return self.forecast(h);
        }
        (1..=h)
            .map(|k| {
                // Closed form of phi + phi^2 + ... + phi^k (geometric sum).
                let phi_sum = phi * (1.0 - phi.powi(k as i32)) / (1.0 - phi);
                self.level + phi_sum * self.trend
            })
            .collect()
    }

    /// One-step-ahead in-sample fitted values, aligned with `data`:
    /// `fitted[t]` is the forecast of `data[t]` made from observations
    /// `0..t`. The first entry has no prior state and is set to `data[0]`,
    /// matching the SES convention.
    ///
    /// Bug fix: the forecast is now pushed BEFORE the state update. The old
    /// code pushed `level + trend` after updating with `y`, so `fitted[t]`
    /// actually predicted `data[t + 1]` — an off-by-one misalignment
    /// relative to `data`, inconsistent with the SES / Holt-Winters fitted
    /// values and with the errors `holt_sse` scores.
    pub fn fitted_values(&self, data: &[f64]) -> Vec<f64> {
        if data.len() < 2 {
            return data.to_vec();
        }
        let mut level = data[0];
        let mut trend = data[1] - data[0];
        let mut fitted = Vec::with_capacity(data.len());
        fitted.push(data[0]);
        for &y in data.iter().skip(1) {
            // Forecast of y from the state before observing it.
            fitted.push(level + trend);
            let l_prev = level;
            level = self.alpha * y + (1.0 - self.alpha) * (level + trend);
            trend = self.beta * (level - l_prev) + (1.0 - self.beta) * trend;
        }
        fitted
    }

    /// The (possibly optimized) level smoothing parameter.
    pub fn alpha(&self) -> f64 {
        self.alpha
    }

    /// The (possibly optimized) trend smoothing parameter.
    pub fn beta(&self) -> f64 {
        self.beta
    }
}
/// Rejects a supplied smoothing parameter outside (0, 1]; `None` (meaning
/// "optimize this parameter") always passes. NaN fails the range test and is
/// rejected like any other invalid value.
fn validate_smoothing_param(name: &str, val: Option<f64>) -> Result<()> {
    match val {
        Some(v) if !(0.0 < v && v <= 1.0) => Err(TimeSeriesError::InvalidParameter {
            name: name.to_string(),
            message: format!("{name} must be in (0, 1], got {v}"),
        }),
        _ => Ok(()),
    }
}
/// Runs the Holt level/trend recursions over the full series (initialized
/// with `level = data[0]`, `trend = data[1] - data[0]`) and returns the
/// end-of-sample `(level, trend)` state.
fn holt_final_state(data: &[f64], alpha: f64, beta: f64) -> (f64, f64) {
    let (mut level, mut trend) = (data[0], data[1] - data[0]);
    for &obs in &data[1..] {
        let prev_level = level;
        level = alpha * obs + (1.0 - alpha) * (level + trend);
        trend = beta * (level - prev_level) + (1.0 - beta) * trend;
    }
    (level, trend)
}
/// Sum of squared one-step-ahead errors of Holt's method. The forecast for
/// each observation is `level + trend` from the state before observing it.
/// Series with fewer than 2 points cannot be scored and return infinity.
fn holt_sse(data: &[f64], alpha: f64, beta: f64) -> f64 {
    if data.len() < 2 {
        return f64::INFINITY;
    }
    let (mut level, mut trend) = (data[0], data[1] - data[0]);
    let mut total = 0.0;
    for &obs in &data[1..] {
        let residual = obs - (level + trend);
        total += residual * residual;
        let prev_level = level;
        level = alpha * obs + (1.0 - alpha) * (level + trend);
        trend = beta * (level - prev_level) + (1.0 - beta) * trend;
    }
    total
}
/// Selects Holt smoothing parameters: coarse grid search over the free
/// coordinates, then golden-section refinement of each free coordinate.
///
/// Fix: a refined coordinate is now accepted only when it strictly lowers
/// the SSE. The SSE surface is not guaranteed unimodal, so golden-section
/// search can return a point worse than the grid optimum; previously such a
/// regression was accepted unconditionally.
fn optimize_holt_params(
    data: &[f64],
    fixed_alpha: Option<f64>,
    fixed_beta: Option<f64>,
) -> Result<(f64, f64)> {
    let grid: Vec<f64> = (1..=20).map(|i| i as f64 * 0.05).collect();
    let mut best_alpha = fixed_alpha.unwrap_or(0.3);
    let mut best_beta = fixed_beta.unwrap_or(0.1);
    let mut best_sse = holt_sse(data, best_alpha, best_beta);
    if fixed_alpha.is_none() || fixed_beta.is_none() {
        for &a in &grid {
            for &b in &grid {
                // A fixed parameter overrides its grid value.
                let alpha = fixed_alpha.unwrap_or(a);
                let beta = fixed_beta.unwrap_or(b);
                let sse = holt_sse(data, alpha, beta);
                if sse < best_sse {
                    best_sse = sse;
                    best_alpha = alpha;
                    best_beta = beta;
                }
            }
        }
    }
    if fixed_alpha.is_none() {
        let candidate = golden_section_1d(|a| holt_sse(data, a, best_beta), 1e-4, 1.0 - 1e-10);
        let sse = holt_sse(data, candidate, best_beta);
        if sse < best_sse {
            best_alpha = candidate;
            best_sse = sse;
        }
    }
    if fixed_beta.is_none() {
        let candidate = golden_section_1d(|b| holt_sse(data, best_alpha, b), 1e-4, 1.0 - 1e-10);
        let sse = holt_sse(data, best_alpha, candidate);
        if sse < best_sse {
            best_beta = candidate;
        }
    }
    Ok((best_alpha, best_beta))
}
/// Golden-section minimization of `f` on `[lo, hi]`.
///
/// Assumes `f` is unimodal on the bracket; returns the midpoint of the final
/// bracket after the width drops below 1e-8 or 300 iterations elapse.
fn golden_section_1d(f: impl Fn(f64) -> f64, lo: f64, hi: f64) -> f64 {
    let ratio = (5.0_f64.sqrt() - 1.0) / 2.0;
    let (mut a, mut b) = (lo, hi);
    // Interior probe points at golden-ratio positions; each step reuses one
    // of them so only a single new evaluation of `f` is needed.
    let mut left = b - ratio * (b - a);
    let mut right = a + ratio * (b - a);
    let mut f_left = f(left);
    let mut f_right = f(right);
    let mut iterations = 0;
    while iterations < 300 && (b - a).abs() >= 1e-8 {
        if f_left < f_right {
            b = right;
            right = left;
            f_right = f_left;
            left = b - ratio * (b - a);
            f_left = f(left);
        } else {
            a = left;
            left = right;
            f_left = f_right;
            right = a + ratio * (b - a);
            f_right = f(right);
        }
        iterations += 1;
    }
    (a + b) / 2.0
}
/// Holt-Winters (triple exponential smoothing) model with additive or
/// multiplicative seasonality.
#[derive(Debug, Clone)]
pub struct HoltWinters {
    // Level smoothing parameter in (0, 1].
    alpha: f64,
    // Trend smoothing parameter in (0, 1].
    beta: f64,
    // Seasonal smoothing parameter in (0, 1].
    gamma: f64,
    // Seasonal cycle length (>= 2).
    period: usize,
    // Whether seasonals add to or multiply the level+trend component.
    seasonal_type: SeasonalType,
    // End-of-sample level from `hw_final_state`.
    level: f64,
    // End-of-sample trend from `hw_final_state`.
    trend: f64,
    // End-of-sample seasonal states, one per position in the cycle,
    // indexed by (observation time) % period.
    seasonals: Vec<f64>,
    // Number of training observations; used to pick seasonal slots when
    // forecasting.
    n_obs: usize,
}
impl HoltWinters {
    /// Fits a Holt-Winters model to `data` with seasonal cycle `period`.
    ///
    /// Any of `alpha` (level), `beta` (trend), `gamma` (season) left as
    /// `None` is optimized by in-sample SSE minimization; supplied values
    /// must lie in (0, 1].
    ///
    /// # Errors
    /// `InvalidParameter` for `period < 2` or an out-of-range smoothing
    /// value, `InsufficientData` for fewer than `2 * period` observations,
    /// and `InvalidInput` for non-positive data with multiplicative
    /// seasonality.
    pub fn fit(
        data: &[f64],
        period: usize,
        seasonal: SeasonalType,
        alpha: Option<f64>,
        beta: Option<f64>,
        gamma: Option<f64>,
    ) -> Result<Self> {
        if period < 2 {
            return Err(TimeSeriesError::InvalidParameter {
                name: "period".to_string(),
                message: "Seasonal period must be >= 2".to_string(),
            });
        }
        // Two full cycles are needed so initial trend and seasonals can be
        // estimated from cycle averages.
        if data.len() < 2 * period {
            return Err(TimeSeriesError::InsufficientData {
                message: "Holt-Winters requires at least 2 full seasonal cycles".to_string(),
                required: 2 * period,
                actual: data.len(),
            });
        }
        // Multiplicative updates divide by data-derived quantities, so zero
        // or negative observations are rejected up front.
        if seasonal == SeasonalType::Multiplicative {
            if data.iter().any(|&v| v <= 0.0) {
                return Err(TimeSeriesError::InvalidInput(
                    "Multiplicative Holt-Winters requires all positive data values".to_string(),
                ));
            }
        }
        validate_smoothing_param("alpha", alpha)?;
        validate_smoothing_param("beta", beta)?;
        validate_smoothing_param("gamma", gamma)?;
        // Fixed parameters pass through the optimizer unchanged; free ones
        // are chosen by grid search plus golden-section refinement.
        let (opt_alpha, opt_beta, opt_gamma) =
            optimize_hw_params(data, period, seasonal, alpha, beta, gamma)?;
        let (l, b, s) = hw_final_state(data, period, seasonal, opt_alpha, opt_beta, opt_gamma);
        Ok(Self {
            alpha: opt_alpha,
            beta: opt_beta,
            gamma: opt_gamma,
            period,
            seasonal_type: seasonal,
            level: l,
            trend: b,
            seasonals: s,
            n_obs: data.len(),
        })
    }

    /// h-step-ahead forecasts from the end-of-sample state.
    ///
    /// The seasonal slot for step k is `(n_obs - m + (k - 1) % m) % m`,
    /// which equals `(n_obs + k - 1) % m` (subtracting m changes nothing
    /// mod m) — the slot the future observation at time `n_obs + k - 1`
    /// would occupy. `n_obs >= 2 * m` is guaranteed by `fit`, so the
    /// subtraction cannot underflow.
    pub fn forecast(&self, h: usize) -> Vec<f64> {
        let m = self.period;
        (1..=h)
            .map(|k| {
                let s_idx = (self.n_obs - m + (k - 1) % m) % m;
                let s = self.seasonals[s_idx];
                match self.seasonal_type {
                    SeasonalType::Additive => self.level + (k as f64) * self.trend + s,
                    SeasonalType::Multiplicative => (self.level + (k as f64) * self.trend) * s,
                }
            })
            .collect()
    }

    /// One-step-ahead in-sample fitted values: `fitted[t]` is the forecast
    /// of `data[t]` from the state before observing it.
    ///
    /// The state is rebuilt from `data` via `hw_initial_components` while
    /// the smoothing parameters come from `self`, so this should be called
    /// with the training series. The recursion mirrors `hw_sse` /
    /// `hw_final_state`.
    pub fn fitted_values(&self, data: &[f64]) -> Vec<f64> {
        let m = self.period;
        let init = hw_initial_components(data, m, self.seasonal_type);
        let mut level = init.0;
        let mut trend = init.1;
        let mut seasonals = init.2;
        let mut fitted = Vec::with_capacity(data.len());
        for (t, &y) in data.iter().enumerate() {
            // Seasonal state for this slot, last updated one cycle ago.
            let s_lag = seasonals[t % m];
            let yhat = match self.seasonal_type {
                SeasonalType::Additive => level + trend + s_lag,
                SeasonalType::Multiplicative => (level + trend) * s_lag,
            };
            fitted.push(yhat);
            let l_prev = level;
            match self.seasonal_type {
                SeasonalType::Additive => {
                    level = self.alpha * (y - s_lag) + (1.0 - self.alpha) * (level + trend);
                    trend = self.beta * (level - l_prev) + (1.0 - self.beta) * trend;
                    seasonals[t % m] =
                        self.gamma * (y - level) + (1.0 - self.gamma) * s_lag;
                }
                SeasonalType::Multiplicative => {
                    // Clamp divisors away from zero to avoid inf/NaN.
                    let safe_s = if s_lag.abs() < 1e-12 { 1e-12 } else { s_lag };
                    level =
                        self.alpha * (y / safe_s) + (1.0 - self.alpha) * (level + trend);
                    trend = self.beta * (level - l_prev) + (1.0 - self.beta) * trend;
                    let denom = level + trend;
                    let safe_denom = if denom.abs() < 1e-12 { 1e-12 } else { denom };
                    seasonals[t % m] =
                        self.gamma * (y / safe_denom) + (1.0 - self.gamma) * s_lag;
                }
            }
        }
        fitted
    }

    /// Akaike information criterion from the in-sample residuals under a
    /// Gaussian likelihood with MLE variance `sse / n`.
    ///
    /// Parameter count k = 3 smoothing parameters + level + trend +
    /// `period` seasonal states.
    pub fn aic(&self, data: &[f64]) -> f64 {
        let fitted = self.fitted_values(data);
        let n = data.len() as f64;
        let sse: f64 = data
            .iter()
            .zip(fitted.iter())
            .map(|(&y, &yhat)| (y - yhat).powi(2))
            .sum();
        let sigma2 = sse / n;
        let k = 3 + 2 + self.period;
        let log_lik = -0.5 * n * (1.0 + (2.0 * std::f64::consts::PI * sigma2).ln());
        -2.0 * log_lik + 2.0 * k as f64
    }

    /// The (possibly optimized) level smoothing parameter.
    pub fn alpha(&self) -> f64 {
        self.alpha
    }

    /// The (possibly optimized) trend smoothing parameter.
    pub fn beta(&self) -> f64 {
        self.beta
    }

    /// The (possibly optimized) seasonal smoothing parameter.
    pub fn gamma(&self) -> f64 {
        self.gamma
    }

    /// The seasonal cycle length.
    pub fn period(&self) -> usize {
        self.period
    }
}
/// Heuristic initial Holt-Winters state: level = mean of the first cycle,
/// trend = per-step difference between the first two cycle means, seasonals
/// = average detrended deviation (additive) or ratio (multiplicative) per
/// cycle position, computed over the complete cycles.
///
/// Multiplicative seasonals are normalized to mean 1 so they represent pure
/// seasonal factors.
///
/// Cleanup: the old `idx < data.len()` fallback branch was unreachable —
/// `idx = j * period + i` with `j < n_complete = data.len() / period`
/// implies `idx <= n_complete * period - 1 < data.len()` — so it has been
/// removed; behavior is unchanged.
fn hw_initial_components(
    data: &[f64],
    period: usize,
    seasonal_type: SeasonalType,
) -> (f64, f64, Vec<f64>) {
    let level: f64 = data[..period].iter().sum::<f64>() / period as f64;
    let trend = if data.len() >= 2 * period {
        let second_avg: f64 = data[period..2 * period].iter().sum::<f64>() / period as f64;
        (second_avg - level) / period as f64
    } else {
        0.0
    };
    let n_complete = data.len() / period;
    let mut seasonals = vec![0.0_f64; period];
    for i in 0..period {
        let sum: f64 = (0..n_complete)
            .map(|j| {
                // Always in bounds: idx <= n_complete * period - 1 < data.len().
                let idx = j * period + i;
                // Trend-adjusted baseline at time idx (1-based step count).
                let expected = level + (idx as f64 + 1.0) * trend;
                match seasonal_type {
                    SeasonalType::Additive => data[idx] - expected,
                    SeasonalType::Multiplicative => {
                        // Neutral factor when the baseline is ~zero.
                        if expected.abs() < 1e-12 {
                            1.0
                        } else {
                            data[idx] / expected
                        }
                    }
                }
            })
            .sum();
        seasonals[i] = sum / n_complete as f64;
    }
    if seasonal_type == SeasonalType::Multiplicative {
        let mean_s = seasonals.iter().sum::<f64>() / period as f64;
        if mean_s.abs() > 1e-12 {
            for s in &mut seasonals {
                *s /= mean_s;
            }
        }
    }
    (level, trend, seasonals)
}
/// Replays the Holt-Winters recursions over the whole sample, starting from
/// the heuristic initial components, and returns the end-of-sample
/// `(level, trend, seasonals)` state.
fn hw_final_state(
    data: &[f64],
    period: usize,
    seasonal_type: SeasonalType,
    alpha: f64,
    beta: f64,
    gamma: f64,
) -> (f64, f64, Vec<f64>) {
    let (mut level, mut trend, mut seasonals) =
        hw_initial_components(data, period, seasonal_type);
    for (t, &y) in data.iter().enumerate() {
        let slot = t % period;
        let s_lag = seasonals[slot];
        let prev_level = level;
        if seasonal_type == SeasonalType::Additive {
            level = alpha * (y - s_lag) + (1.0 - alpha) * (level + trend);
            trend = beta * (level - prev_level) + (1.0 - beta) * trend;
            seasonals[slot] = gamma * (y - level) + (1.0 - gamma) * s_lag;
        } else {
            // Clamp divisors away from zero to avoid inf/NaN.
            let safe_s = if s_lag.abs() < 1e-12 { 1e-12 } else { s_lag };
            level = alpha * (y / safe_s) + (1.0 - alpha) * (level + trend);
            trend = beta * (level - prev_level) + (1.0 - beta) * trend;
            let denom = level + trend;
            let safe_denom = if denom.abs() < 1e-12 { 1e-12 } else { denom };
            seasonals[slot] = gamma * (y / safe_denom) + (1.0 - gamma) * s_lag;
        }
    }
    (level, trend, seasonals)
}
/// Sum of squared one-step-ahead errors of Holt-Winters with the given
/// parameters; the objective minimized by `optimize_hw_params`. Each
/// observation is forecast from the state before observing it, then the
/// state is updated exactly as in `hw_final_state`.
fn hw_sse(
    data: &[f64],
    period: usize,
    seasonal_type: SeasonalType,
    alpha: f64,
    beta: f64,
    gamma: f64,
) -> f64 {
    let (mut level, mut trend, mut seasonals) =
        hw_initial_components(data, period, seasonal_type);
    let mut total = 0.0;
    for (t, &y) in data.iter().enumerate() {
        let slot = t % period;
        let s_lag = seasonals[slot];
        let one_step = match seasonal_type {
            SeasonalType::Additive => level + trend + s_lag,
            SeasonalType::Multiplicative => (level + trend) * s_lag,
        };
        let residual = y - one_step;
        total += residual * residual;
        let prev_level = level;
        if seasonal_type == SeasonalType::Additive {
            level = alpha * (y - s_lag) + (1.0 - alpha) * (level + trend);
            trend = beta * (level - prev_level) + (1.0 - beta) * trend;
            seasonals[slot] = gamma * (y - level) + (1.0 - gamma) * s_lag;
        } else {
            // Clamp divisors away from zero to avoid inf/NaN.
            let safe_s = if s_lag.abs() < 1e-12 { 1e-12 } else { s_lag };
            level = alpha * (y / safe_s) + (1.0 - alpha) * (level + trend);
            trend = beta * (level - prev_level) + (1.0 - beta) * trend;
            let denom = level + trend;
            let safe_denom = if denom.abs() < 1e-12 { 1e-12 } else { denom };
            seasonals[slot] = gamma * (y / safe_denom) + (1.0 - gamma) * s_lag;
        }
    }
    total
}
/// Selects Holt-Winters smoothing parameters: coarse grid search over every
/// free coordinate, then up to five rounds of coordinate-wise golden-section
/// refinement. Caller-supplied parameters are held fixed throughout.
///
/// Fixes: the `if x.is_some() { x.expect(...) }` candidate construction is
/// replaced with idiomatic `match`, and a refined coordinate is accepted
/// only when it strictly lowers the SSE — the surface is not guaranteed
/// unimodal, so golden-section search can otherwise regress below the grid
/// optimum.
fn optimize_hw_params(
    data: &[f64],
    period: usize,
    seasonal_type: SeasonalType,
    fixed_alpha: Option<f64>,
    fixed_beta: Option<f64>,
    fixed_gamma: Option<f64>,
) -> Result<(f64, f64, f64)> {
    let coarse: Vec<f64> = vec![0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.8];
    let mut best_alpha = fixed_alpha.unwrap_or(0.3);
    let mut best_beta = fixed_beta.unwrap_or(0.1);
    let mut best_gamma = fixed_gamma.unwrap_or(0.2);
    let mut best_sse = hw_sse(data, period, seasonal_type, best_alpha, best_beta, best_gamma);
    // A fixed parameter contributes a single candidate; a free one scans the
    // coarse grid.
    let alpha_candidates: Vec<f64> = match fixed_alpha {
        Some(v) => vec![v],
        None => coarse.clone(),
    };
    let beta_candidates: Vec<f64> = match fixed_beta {
        Some(v) => vec![v],
        None => coarse.clone(),
    };
    let gamma_candidates: Vec<f64> = match fixed_gamma {
        Some(v) => vec![v],
        None => coarse,
    };
    for &a in &alpha_candidates {
        for &b in &beta_candidates {
            for &g in &gamma_candidates {
                let sse = hw_sse(data, period, seasonal_type, a, b, g);
                if sse < best_sse {
                    best_sse = sse;
                    best_alpha = a;
                    best_beta = b;
                    best_gamma = g;
                }
            }
        }
    }
    // Coordinate descent, one free parameter at a time.
    for _ in 0..5 {
        if fixed_alpha.is_none() {
            let cand = golden_section_1d(
                |a| hw_sse(data, period, seasonal_type, a, best_beta, best_gamma),
                1e-4,
                1.0 - 1e-10,
            );
            let sse = hw_sse(data, period, seasonal_type, cand, best_beta, best_gamma);
            if sse < best_sse {
                best_alpha = cand;
                best_sse = sse;
            }
        }
        if fixed_beta.is_none() {
            let cand = golden_section_1d(
                |b| hw_sse(data, period, seasonal_type, best_alpha, b, best_gamma),
                1e-4,
                1.0 - 1e-10,
            );
            let sse = hw_sse(data, period, seasonal_type, best_alpha, cand, best_gamma);
            if sse < best_sse {
                best_beta = cand;
                best_sse = sse;
            }
        }
        if fixed_gamma.is_none() {
            let cand = golden_section_1d(
                |g| hw_sse(data, period, seasonal_type, best_alpha, best_beta, g),
                1e-4,
                1.0 - 1e-10,
            );
            let sse = hw_sse(data, period, seasonal_type, best_alpha, best_beta, cand);
            if sse < best_sse {
                best_gamma = cand;
                best_sse = sse;
            }
        }
    }
    Ok((best_alpha, best_beta, best_gamma))
}
#[cfg(test)]
mod tests {
    //! Unit tests for the exponential smoothing family (SES, Holt linear,
    //! Holt-Winters).
    use super::*;

    // Period-4 series with a constant additive seasonal pattern and a +1
    // level shift per cycle.
    fn seasonal_data_additive() -> Vec<f64> {
        vec![
            10.0, 14.0, 12.0, 8.0,
            11.0, 15.0, 13.0, 9.0,
            12.0, 16.0, 14.0, 10.0,
        ]
    }

    // Period-4 series where each cycle is the previous one scaled by 1.1 —
    // a purely multiplicative seasonal/trend structure.
    fn seasonal_data_multiplicative() -> Vec<f64> {
        vec![
            100.0, 140.0, 120.0, 80.0,
            110.0, 154.0, 132.0, 88.0,
            121.0, 169.4, 145.2, 96.8,
        ]
    }

    // Optimized alpha must land in the valid (0, 1] range.
    #[test]
    fn test_ses_fit_auto() {
        let data: Vec<f64> = (1..=20).map(|i| i as f64 + 0.1 * (i % 3) as f64).collect();
        let model = SimpleExponentialSmoothing::fit(&data, None).expect("failed to create model");
        assert!(model.alpha() > 0.0 && model.alpha() <= 1.0, "alpha out of range");
    }

    // A user-supplied alpha is used verbatim.
    #[test]
    fn test_ses_fit_fixed_alpha() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0];
        let model = SimpleExponentialSmoothing::fit(&data, Some(0.3)).expect("failed to create model");
        assert!((model.alpha() - 0.3).abs() < 1e-10);
    }

    // SES has no trend component, so every forecast step is the same value.
    #[test]
    fn test_ses_forecast_constant() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0];
        let model = SimpleExponentialSmoothing::fit(&data, Some(0.5)).expect("failed to create model");
        let fcast = model.forecast(5);
        for &f in &fcast {
            assert!((f - fcast[0]).abs() < 1e-10, "SES forecast should be flat");
        }
    }

    // Fitted values are one per observation.
    #[test]
    fn test_ses_fitted_length() {
        let data = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0];
        let model = SimpleExponentialSmoothing::fit(&data, Some(0.4)).expect("failed to create model");
        let fitted = model.fitted_values(&data);
        assert_eq!(fitted.len(), data.len());
    }

    // On a constant series the smoothed level equals that constant for any
    // alpha, so the forecast must recover it.
    #[test]
    fn test_ses_converges_on_constant() {
        let data = vec![5.0_f64; 30];
        let model = SimpleExponentialSmoothing::fit(&data, None).expect("failed to create model");
        let fcast = model.forecast(1);
        assert!((fcast[0] - 5.0).abs() < 1e-6, "SES should converge on constant series");
    }

    // alpha = 0 and alpha > 1 are both outside (0, 1].
    #[test]
    fn test_ses_invalid_alpha() {
        let data = vec![1.0, 2.0, 3.0];
        assert!(SimpleExponentialSmoothing::fit(&data, Some(0.0)).is_err());
        assert!(SimpleExponentialSmoothing::fit(&data, Some(1.5)).is_err());
    }

    // SES needs at least 2 observations.
    #[test]
    fn test_ses_insufficient_data() {
        assert!(SimpleExponentialSmoothing::fit(&[1.0], None).is_err());
    }

    // On a strictly increasing line, Holt forecasts must keep increasing.
    #[test]
    fn test_holt_fit_and_forecast() {
        let data: Vec<f64> = (1..=20).map(|i| i as f64).collect();
        let model = HoltLinear::fit(&data, None, None).expect("failed to create model");
        let fcast = model.forecast(5);
        assert_eq!(fcast.len(), 5);
        for w in fcast.windows(2) {
            assert!(w[1] > w[0], "linear trend forecasts must be increasing");
        }
    }

    // Damping shrinks the cumulative trend contribution, so the damped path
    // must sum to less than the undamped one (positive trend here).
    #[test]
    fn test_holt_damped_forecast() {
        let data: Vec<f64> = (1..=20).map(|i| i as f64).collect();
        let model = HoltLinear::fit(&data, None, None).expect("failed to create model");
        let undamped = model.forecast(10);
        let damped = model.damped_forecast(10, 0.9);
        assert_eq!(damped.len(), 10);
        let total_undamped: f64 = undamped.iter().sum();
        let total_damped: f64 = damped.iter().sum();
        assert!(total_damped < total_undamped, "damped should be less than undamped");
    }

    // Holt requires at least 4 observations.
    #[test]
    fn test_holt_insufficient_data() {
        assert!(HoltLinear::fit(&[1.0, 2.0, 3.0], None, None).is_err());
    }

    // All three optimized parameters must land in (0, 1].
    #[test]
    fn test_hw_additive_fit() {
        let data = seasonal_data_additive();
        let model = HoltWinters::fit(&data, 4, SeasonalType::Additive, None, None, None).expect("failed to create model");
        assert!(model.alpha() > 0.0 && model.alpha() <= 1.0);
        assert!(model.beta() > 0.0 && model.beta() <= 1.0);
        assert!(model.gamma() > 0.0 && model.gamma() <= 1.0);
    }

    // Forecast horizon longer than one seasonal cycle still yields h values.
    #[test]
    fn test_hw_additive_forecast_length() {
        let data = seasonal_data_additive();
        let model =
            HoltWinters::fit(&data, 4, SeasonalType::Additive, Some(0.3), Some(0.1), Some(0.2))
                .expect("unexpected None or Err");
        let fcast = model.forecast(8);
        assert_eq!(fcast.len(), 8);
    }

    // Fitted values are one per observation.
    #[test]
    fn test_hw_additive_fitted_values() {
        let data = seasonal_data_additive();
        let model =
            HoltWinters::fit(&data, 4, SeasonalType::Additive, Some(0.4), Some(0.1), Some(0.3))
                .expect("unexpected None or Err");
        let fitted = model.fitted_values(&data);
        assert_eq!(fitted.len(), data.len());
    }

    // Multiplicative forecasts on positive data must stay finite and positive.
    #[test]
    fn test_hw_multiplicative_fit() {
        let data = seasonal_data_multiplicative();
        let model =
            HoltWinters::fit(&data, 4, SeasonalType::Multiplicative, None, None, None).expect("unexpected None or Err");
        let fcast = model.forecast(4);
        assert_eq!(fcast.len(), 4);
        for &f in &fcast {
            assert!(f.is_finite(), "forecast must be finite");
            assert!(f > 0.0, "multiplicative forecast must be positive for positive data");
        }
    }

    // AIC should evaluate to a finite number on a well-posed fit.
    #[test]
    fn test_hw_aic() {
        let data = seasonal_data_additive();
        let model =
            HoltWinters::fit(&data, 4, SeasonalType::Additive, Some(0.3), Some(0.1), Some(0.2))
                .expect("unexpected None or Err");
        let aic = model.aic(&data);
        assert!(aic.is_finite(), "AIC must be finite");
    }

    // period = 1 is not a seasonal model.
    #[test]
    fn test_hw_period_too_small() {
        let data = seasonal_data_additive();
        assert!(HoltWinters::fit(&data, 1, SeasonalType::Additive, None, None, None).is_err());
    }

    // One cycle of data is not enough (needs 2 * period).
    #[test]
    fn test_hw_insufficient_data() {
        let data = vec![1.0, 2.0, 3.0, 4.0];
        assert!(HoltWinters::fit(&data, 4, SeasonalType::Additive, None, None, None).is_err());
    }

    // A non-positive observation must be rejected in multiplicative mode.
    #[test]
    fn test_hw_multiplicative_requires_positive() {
        let data = vec![
            1.0, -2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0,
        ];
        assert!(
            HoltWinters::fit(&data, 4, SeasonalType::Multiplicative, None, None, None).is_err()
        );
    }

    // The data's Q1 > Q3 seasonal ordering must survive into the forecasts.
    #[test]
    fn test_hw_seasonal_pattern_preserved() {
        let data = vec![
            20.0, 10.0, 5.0, 15.0,
            22.0, 11.0, 6.0, 16.0,
            24.0, 12.0, 7.0, 17.0,
        ];
        let model =
            HoltWinters::fit(&data, 4, SeasonalType::Additive, Some(0.3), Some(0.1), Some(0.3))
                .expect("unexpected None or Err");
        let fcast = model.forecast(4);
        assert!(
            fcast[0] > fcast[2],
            "Q1 forecast should be greater than Q3 forecast: {:.2} vs {:.2}",
            fcast[0], fcast[2]
        );
    }
}