rs_ml/regression/linear.rs

use ndarray::{Array1, Array2, Axis};
use ndarray_linalg::Inverse;

use crate::Estimator;

use super::Regressor;

/// Fits an ordinary least squares model by solving the normal equations.
#[derive(Debug, Clone, Copy)]
pub struct OrdinaryLeastSquaresEstimator;

/// A fitted ordinary least squares model. `beta` holds the coefficients as an
/// (n_features + 1, 1) column vector, with the intercept as the last entry.
#[derive(Debug, Clone)]
pub struct OrdinaryLeastSquaresRegressor {
    beta: Array2<f64>,
}

impl Estimator<(&Array2<f64>, &Array1<f64>)> for OrdinaryLeastSquaresEstimator {
    type Estimator = OrdinaryLeastSquaresRegressor;

    fn fit(&self, input: &(&Array2<f64>, &Array1<f64>)) -> Option<Self::Estimator> {
        let (x, y) = *input;

        // Append a column of ones so the last coefficient acts as the intercept.
        let nrows = x.nrows();
        let mut x_added_one = x.to_owned();
        x_added_one.push_column(Array1::ones(nrows).view()).ok()?;

        // Reshape the targets into an (n, 1) column vector.
        let binding = y.view().insert_axis(Axis(0));
        let transformed_y = binding.t();

        // Normal equations: beta = (X^T X)^-1 X^T y. Fitting fails (returns None)
        // if the Gram matrix is singular.
        let inv_gram_matrix: Array2<f64> = x_added_one.t().dot(&x_added_one).inv().ok()?;
        let beta = inv_gram_matrix.dot(&x_added_one.t().dot(&transformed_y));

        Some(OrdinaryLeastSquaresRegressor { beta })
    }
}

impl Regressor<Array2<f64>, Array1<f64>> for OrdinaryLeastSquaresRegressor {
    fn predict(&self, input: &Array2<f64>) -> Option<Array1<f64>> {
        // Append the same intercept column that was used during fitting.
        let nrows = input.nrows();
        let mut x_added_one = input.to_owned();
        x_added_one.push_column(Array1::ones(nrows).view()).ok()?;

        // X . beta is an (n, 1) column vector; drop the length-1 axis to return a 1-D array.
        let y = x_added_one.dot(&self.beta);
        Some(y.remove_axis(Axis(1)))
    }
}
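
// A minimal usage sketch: fit on perfectly linear data (y = 2x + 1) and check that
// the recovered model reproduces the targets. Assumes the crate-level `Estimator`
// and `Regressor` traits as used above; data and tolerance are illustrative.
#[cfg(test)]
mod tests {
    use super::*;
    use ndarray::{array, Array1, Array2};

    #[test]
    fn recovers_a_simple_line() {
        // Single feature, four samples: y = 2x + 1.
        let x: Array2<f64> = array![[0.0], [1.0], [2.0], [3.0]];
        let y: Array1<f64> = array![1.0, 3.0, 5.0, 7.0];

        let model = OrdinaryLeastSquaresEstimator
            .fit(&(&x, &y))
            .expect("Gram matrix should be invertible for this data");

        let preds = model.predict(&x).expect("prediction should succeed");
        for (pred, target) in preds.iter().zip(y.iter()) {
            assert!((pred - target).abs() < 1e-8);
        }
    }
}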