use crate::error::{Result, TransformError};
use scirs2_core::ndarray::{Array1, Array2, ArrayView1, ArrayView2};
/// Online ridge regression fitted incrementally by recursive least squares (RLS).
///
/// State is lazily initialized on the first call to `partial_fit`, which also
/// fixes the expected feature count.
#[derive(Debug, Clone)]
pub struct OnlineRidgeRegression {
    /// L2 regularization strength; the initial precision matrix is `I / lambda`.
    pub lambda: f64,
    // Learned weight vector; `None` until the first `partial_fit` call.
    weights: Option<Array1<f64>>,
    // Running inverse of the regularized Gram matrix, updated per sample.
    precision: Option<Array2<f64>>,
    // Feature count fixed by the first `partial_fit` call (0 until then).
    n_features: usize,
}
impl OnlineRidgeRegression {
    /// Creates a new online ridge regressor with L2 strength `lambda`.
    ///
    /// `lambda` must be strictly positive; this is validated on the first call
    /// to [`partial_fit`](Self::partial_fit) because construction is infallible.
    pub fn new(lambda: f64) -> Self {
        Self {
            lambda,
            weights: None,
            precision: None,
            n_features: 0,
        }
    }

    /// Incrementally updates the model on a mini-batch using recursive least
    /// squares with a Sherman–Morrison rank-1 update of the precision matrix.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::InvalidInput` if `x.nrows() != y.len()`, if
    /// `lambda` is not strictly positive, or if the feature count differs
    /// from the one seen on the first call.
    pub fn partial_fit(
        &mut self,
        x: ArrayView2<f64>,
        y: ArrayView1<f64>,
    ) -> Result<()> {
        // zip() would silently drop trailing samples on a mismatch, so check first.
        if x.nrows() != y.len() {
            return Err(TransformError::InvalidInput(format!(
                "x has {} rows but y has {} targets",
                x.nrows(),
                y.len()
            )));
        }
        let n_features = x.ncols();
        if self.weights.is_none() {
            // lambda <= 0 would make the initial precision matrix infinite/NaN.
            if self.lambda <= 0.0 {
                return Err(TransformError::InvalidInput(format!(
                    "lambda must be positive, got {}",
                    self.lambda
                )));
            }
            self.n_features = n_features;
            self.weights = Some(Array1::zeros(n_features));
            // P_0 = (lambda * I)^{-1} = I / lambda.
            let mut p = Array2::<f64>::eye(n_features);
            p.mapv_inplace(|v| v / self.lambda);
            self.precision = Some(p);
        }
        if x.ncols() != self.n_features {
            return Err(TransformError::InvalidInput(format!(
                "Expected {} features, got {}",
                self.n_features,
                x.ncols()
            )));
        }
        let p = self.precision.as_mut().expect("precision initialized above");
        let w = self.weights.as_mut().expect("weights initialized above");
        for (xi, yi) in x.rows().into_iter().zip(y.iter()) {
            // Gain vector k = P x / (1 + x^T P x).
            let p_xi = p.dot(&xi);
            let denom = 1.0 + xi.dot(&p_xi);
            let k = p_xi.mapv(|v| v / denom);
            // Sherman–Morrison: P <- P - k (P x)^T. The outer product uses
            // p_xi for both factors, which is valid because P stays symmetric.
            for i in 0..self.n_features {
                for j in 0..self.n_features {
                    p[[i, j]] -= k[i] * p_xi[j];
                }
            }
            // w <- w + k * (y - x^T w), using the pre-update prediction error.
            let err = yi - w.dot(&xi);
            for i in 0..self.n_features {
                w[i] += k[i] * err;
            }
        }
        Ok(())
    }

    /// Predicts one target per row of `x`.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::NotFitted` if `partial_fit` was never called.
    pub fn predict(&self, x: ArrayView2<f64>) -> Result<Array1<f64>> {
        let w = self.weights.as_ref().ok_or_else(|| {
            TransformError::NotFitted("OnlineRidgeRegression".to_string())
        })?;
        Ok(x.dot(w))
    }

    /// Returns the learned weight vector, or `None` before the first fit.
    pub fn weights(&self) -> Option<&Array1<f64>> {
        self.weights.as_ref()
    }
}
/// Online L1-regularized linear regression trained by stochastic gradient
/// descent with per-sample soft-thresholding of the weights.
#[derive(Debug, Clone)]
pub struct OnlineLasso {
    /// L1 regularization strength used to scale the soft-threshold.
    pub lambda: f64,
    /// Base learning rate; the effective rate decays as `eta / sqrt(t)`.
    pub eta: f64,
    // Learned weight vector; `None` until the first `partial_fit` call.
    weights: Option<Array1<f64>>,
    // Feature count fixed by the first `partial_fit` call (0 until then).
    n_features: usize,
    // Number of individual samples processed so far (drives the decay).
    t: usize,
}
impl OnlineLasso {
    /// Creates a new online lasso with L1 strength `lambda` and base learning
    /// rate `eta`.
    pub fn new(lambda: f64, eta: f64) -> Self {
        Self {
            lambda,
            eta,
            weights: None,
            n_features: 0,
            t: 0,
        }
    }

    /// Incrementally updates the model on a mini-batch: for each sample, take
    /// one SGD step on the squared loss with step size `eta / sqrt(t)`, then
    /// soft-threshold every weight by `eta_t * lambda` (truncated gradient).
    ///
    /// # Errors
    ///
    /// Returns `TransformError::InvalidInput` if `x.nrows() != y.len()` or if
    /// the feature count differs from the one seen on the first call.
    pub fn partial_fit(
        &mut self,
        x: ArrayView2<f64>,
        y: ArrayView1<f64>,
    ) -> Result<()> {
        // zip() would silently drop trailing samples on a mismatch, so check first.
        if x.nrows() != y.len() {
            return Err(TransformError::InvalidInput(format!(
                "x has {} rows but y has {} targets",
                x.nrows(),
                y.len()
            )));
        }
        let n_features = x.ncols();
        if self.weights.is_none() {
            self.n_features = n_features;
            self.weights = Some(Array1::zeros(n_features));
        }
        if x.ncols() != self.n_features {
            return Err(TransformError::InvalidInput(format!(
                "Expected {} features, got {}",
                self.n_features,
                x.ncols()
            )));
        }
        let w = self.weights.as_mut().expect("weights initialized");
        for (xi, yi) in x.rows().into_iter().zip(y.iter()) {
            self.t += 1;
            let pred = w.dot(&xi);
            let err = yi - pred;
            // Decaying step size keeps the iterates stable over long streams.
            let eta_t = self.eta / (self.t as f64).sqrt();
            for (wj, xj) in w.iter_mut().zip(xi.iter()) {
                *wj += eta_t * err * xj;
            }
            // Soft-threshold: shrink toward zero by eta_t * lambda, clamping
            // small weights exactly to zero to induce sparsity.
            let threshold = eta_t * self.lambda;
            for wj in w.iter_mut() {
                if *wj > threshold {
                    *wj -= threshold;
                } else if *wj < -threshold {
                    *wj += threshold;
                } else {
                    *wj = 0.0;
                }
            }
        }
        Ok(())
    }

    /// Predicts one target per row of `x`.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::NotFitted` if `partial_fit` was never called.
    pub fn predict(&self, x: ArrayView2<f64>) -> Result<Array1<f64>> {
        let w = self.weights.as_ref().ok_or_else(|| {
            TransformError::NotFitted("OnlineLasso".to_string())
        })?;
        Ok(x.dot(w))
    }

    /// Returns the learned weight vector, or `None` before the first fit.
    /// Added for consistency with `OnlineRidgeRegression`.
    pub fn weights(&self) -> Option<&Array1<f64>> {
        self.weights.as_ref()
    }
}
/// Online regressor using passive-aggressive updates with an
/// epsilon-insensitive loss: samples predicted within `epsilon` of the target
/// leave the weights untouched; others trigger a step capped by `c`.
#[derive(Debug, Clone)]
pub struct PassiveAggressiveRegressor {
    /// Aggressiveness cap on the per-sample step size `tau`.
    pub c: f64,
    /// Half-width of the insensitive band around the target.
    pub epsilon: f64,
    // Learned weight vector; `None` until the first `partial_fit` call.
    weights: Option<Array1<f64>>,
    // Feature count fixed by the first `partial_fit` call (0 until then).
    n_features: usize,
}
impl PassiveAggressiveRegressor {
    /// Creates a new passive-aggressive regressor with step cap `c` and
    /// insensitive-band half-width `epsilon`.
    pub fn new(c: f64, epsilon: f64) -> Self {
        Self {
            c,
            epsilon,
            weights: None,
            n_features: 0,
        }
    }

    /// Incrementally updates the model on a mini-batch with PA-I updates:
    /// when `|y - pred| > epsilon`, step the weights by
    /// `tau = min(c, loss / ||x||^2)` in the direction of the error sign.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::InvalidInput` if `x.nrows() != y.len()` or if
    /// the feature count differs from the one seen on the first call.
    pub fn partial_fit(
        &mut self,
        x: ArrayView2<f64>,
        y: ArrayView1<f64>,
    ) -> Result<()> {
        // zip() would silently drop trailing samples on a mismatch, so check first.
        if x.nrows() != y.len() {
            return Err(TransformError::InvalidInput(format!(
                "x has {} rows but y has {} targets",
                x.nrows(),
                y.len()
            )));
        }
        let n_features = x.ncols();
        if self.weights.is_none() {
            self.n_features = n_features;
            self.weights = Some(Array1::zeros(n_features));
        }
        if x.ncols() != self.n_features {
            return Err(TransformError::InvalidInput(format!(
                "Expected {} features, got {}",
                self.n_features,
                x.ncols()
            )));
        }
        let w = self.weights.as_mut().expect("weights initialized");
        for (xi, yi) in x.rows().into_iter().zip(y.iter()) {
            let pred = w.dot(&xi);
            // Epsilon-insensitive hinge loss; "passive" when inside the band.
            let loss = (yi - pred).abs() - self.epsilon;
            if loss <= 0.0 {
                continue;
            }
            let xi_sq = xi.dot(&xi);
            // Guard against an (effectively) all-zero sample, which would
            // otherwise divide by zero.
            let tau = if xi_sq < 1e-12 {
                0.0
            } else {
                (loss / xi_sq).min(self.c)
            };
            let sign = if *yi > pred { 1.0 } else { -1.0 };
            for (wj, xj) in w.iter_mut().zip(xi.iter()) {
                *wj += tau * sign * xj;
            }
        }
        Ok(())
    }

    /// Predicts one target per row of `x`.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::NotFitted` if `partial_fit` was never called.
    pub fn predict(&self, x: ArrayView2<f64>) -> Result<Array1<f64>> {
        let w = self.weights.as_ref().ok_or_else(|| {
            TransformError::NotFitted("PassiveAggressiveRegressor".to_string())
        })?;
        Ok(x.dot(w))
    }

    /// Returns the learned weight vector, or `None` before the first fit.
    /// Added for consistency with `OnlineRidgeRegression`.
    pub fn weights(&self) -> Option<&Array1<f64>> {
        self.weights.as_ref()
    }
}
/// Online linear regressor trained with FTRL-Proximal-style updates on the
/// squared loss.
#[derive(Debug, Clone)]
pub struct FtrlRegressor {
    /// L1-style threshold: coordinates with `|z| <= alpha` are clamped to zero.
    pub alpha: f64,
    /// Smoothing constant in the per-coordinate adaptive denominator.
    pub beta: f64,
    /// Learning-rate scale dividing the adaptive denominator and sigma.
    pub eta: f64,
    // Weight vector re-derived from `z`/`n_acc` each sample; `None` until fit.
    weights: Option<Array1<f64>>,
    // Per-coordinate shifted gradient accumulator.
    z: Option<Array1<f64>>,
    // Per-coordinate sum of squared gradients.
    n_acc: Option<Array1<f64>>,
    // Feature count fixed by the first `partial_fit` call (0 until then).
    n_features: usize,
    // Number of individual samples processed (counter only; not used in updates).
    t: usize,
}
impl FtrlRegressor {
    /// Creates a new FTRL regressor. `alpha` acts as the L1 threshold on the
    /// accumulated `z` values, `beta` smooths the adaptive denominator, and
    /// `eta` scales the per-coordinate step size.
    pub fn new(alpha: f64, beta: f64, eta: f64) -> Self {
        Self {
            alpha,
            beta,
            eta,
            weights: None,
            z: None,
            n_acc: None,
            n_features: 0,
            t: 0,
        }
    }

    /// Processes a mini-batch with FTRL-Proximal updates: for each sample the
    /// weights are first re-derived from the accumulators `z` (shifted
    /// gradient sums) and `n_acc` (sums of squared gradients), then the
    /// accumulators are advanced with the squared-loss gradient.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::InvalidInput` if `x.nrows() != y.len()` or if
    /// the feature count differs from the one seen on the first call.
    pub fn partial_fit(
        &mut self,
        x: ArrayView2<f64>,
        y: ArrayView1<f64>,
    ) -> Result<()> {
        // zip() would silently drop trailing samples on a mismatch, so check first.
        if x.nrows() != y.len() {
            return Err(TransformError::InvalidInput(format!(
                "x has {} rows but y has {} targets",
                x.nrows(),
                y.len()
            )));
        }
        let n_features = x.ncols();
        if self.weights.is_none() {
            self.n_features = n_features;
            self.weights = Some(Array1::zeros(n_features));
            self.z = Some(Array1::zeros(n_features));
            self.n_acc = Some(Array1::zeros(n_features));
        }
        if x.ncols() != self.n_features {
            return Err(TransformError::InvalidInput(format!(
                "Expected {} features, got {}",
                self.n_features,
                x.ncols()
            )));
        }
        let w = self.weights.as_mut().expect("weights initialized");
        let z = self.z.as_mut().expect("z initialized");
        let n = self.n_acc.as_mut().expect("n_acc initialized");
        for (xi, yi) in x.rows().into_iter().zip(y.iter()) {
            self.t += 1;
            // Re-derive w from the accumulators; coordinates whose |z| lies
            // within the L1 threshold `alpha` are clamped exactly to zero.
            for j in 0..self.n_features {
                let sign_z = if z[j] >= 0.0 { 1.0 } else { -1.0 };
                if (z[j] * sign_z) <= self.alpha {
                    w[j] = 0.0;
                } else {
                    // NOTE(review): `beta` is used both as the denominator
                    // smoother and as the trailing additive term; in the
                    // standard FTRL-Proximal formulation that trailing term is
                    // a separate lambda2 (L2 strength) — confirm this reuse of
                    // `beta` is intentional.
                    w[j] = -(z[j] - sign_z * self.alpha)
                        / ((self.beta + n[j].sqrt()) / self.eta + self.beta);
                }
            }
            let pred = w.dot(&xi);
            // Gradient of the squared loss w.r.t. the prediction.
            let grad = pred - yi;
            for (j, xj) in xi.iter().enumerate() {
                let gj = grad * xj;
                // sigma captures the per-coordinate learning-rate change; it
                // is non-negative analytically, so max(0.0) only guards
                // against floating-point rounding.
                let sigma = (((n[j] + gj * gj).sqrt() - n[j].sqrt()) / self.eta).max(0.0);
                z[j] += gj - sigma * w[j];
                n[j] += gj * gj;
            }
        }
        Ok(())
    }

    /// Predicts one target per row of `x`.
    ///
    /// # Errors
    ///
    /// Returns `TransformError::NotFitted` if `partial_fit` was never called.
    pub fn predict(&self, x: ArrayView2<f64>) -> Result<Array1<f64>> {
        let w = self.weights.as_ref().ok_or_else(|| {
            TransformError::NotFitted("FtrlRegressor".to_string())
        })?;
        Ok(x.dot(w))
    }

    /// Returns the learned weight vector, or `None` before the first fit.
    /// Added for consistency with `OnlineRidgeRegression`.
    pub fn weights(&self) -> Option<&Array1<f64>> {
        self.weights.as_ref()
    }
}