mod elastic_net;
mod lasso;
mod lbfgs;
mod logistic;
pub(crate) mod qr;
mod regression;
pub(crate) mod svd;
pub use elastic_net::ElasticNet;
pub use lasso::LassoRegression;
pub use logistic::{LogisticRegression, Penalty, Solver};
pub use regression::LinearRegression;
use crate::dataset::Dataset;
use crate::error::Result;
/// Ridge regression: linear regression with an L2 penalty.
///
/// Thin convenience wrapper around [`LinearRegression`] with the L2
/// regularization strength (`alpha`) fixed at construction time, so the
/// two produce identical results for the same `alpha` (see the
/// `test_ridge_alias` test below).
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[non_exhaustive]
pub struct Ridge {
    // Delegate that does the actual fitting/prediction; `alpha` is baked
    // in by `Ridge::new`.
    inner: LinearRegression,
}
impl Ridge {
    /// Creates a ridge regressor with L2 regularization strength `alpha`.
    ///
    /// Equivalent to `LinearRegression::new().alpha(alpha)`.
    pub fn new(alpha: f64) -> Self {
        Self {
            inner: LinearRegression::new().alpha(alpha),
        }
    }

    /// Fits the model to `data`, learning coefficients and intercept.
    ///
    /// # Errors
    ///
    /// Propagates any error from the underlying
    /// [`LinearRegression::fit`] (e.g. on ill-formed input — see that
    /// method for the exact conditions).
    pub fn fit(&mut self, data: &Dataset) -> Result<()> {
        self.inner.fit(data)
    }

    /// Predicts targets for each feature column in `features`.
    ///
    /// # Errors
    ///
    /// Propagates any error from the underlying
    /// [`LinearRegression::predict`] (e.g. if called before `fit` or on
    /// mismatched dimensions — see that method for the exact conditions).
    #[must_use = "predictions are returned, not stored on the model"]
    pub fn predict(&self, features: &[Vec<f64>]) -> Result<Vec<f64>> {
        self.inner.predict(features)
    }

    /// Returns the fitted coefficients, one per feature.
    #[must_use]
    pub fn coefficients(&self) -> &[f64] {
        self.inner.coefficients()
    }

    /// Returns the fitted intercept term.
    #[must_use]
    pub fn intercept(&self) -> f64 {
        self.inner.intercept()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `Ridge::new(alpha)` must be an exact alias for
    /// `LinearRegression::new().alpha(alpha)` on identical data.
    #[test]
    fn test_ridge_alias() {
        let xs = vec![vec![1.0, 2.0, 3.0, 4.0, 5.0]];
        let ys = vec![2.0, 4.0, 6.0, 8.0, 10.0];
        let data = Dataset::new(xs, ys, vec!["x".into()], "y");

        let mut ridge = Ridge::new(1.0);
        ridge.fit(&data).unwrap();

        let mut baseline = LinearRegression::new().alpha(1.0);
        baseline.fit(&data).unwrap();

        let ridge_coef = ridge.coefficients()[0];
        let baseline_coef = baseline.coefficients()[0];
        assert!(
            (ridge_coef - baseline_coef).abs() < 1e-10,
            "Ridge and LinearRegression(alpha=1.0) should produce identical coefficients"
        );
        assert!(
            (ridge.intercept() - baseline.intercept()).abs() < 1e-10,
            "Ridge and LinearRegression(alpha=1.0) should produce identical intercepts"
        );

        // The data is exactly y = 2x, so the L2 penalty should shrink the
        // slope below the unregularized 2.0 while keeping it well above 1.0.
        assert!(ridge_coef < 2.0);
        assert!(ridge_coef > 1.0);
    }
}