#[cfg(feature = "stats")]
use crate::response::Response;
use crate::{
error::{RegressionError, RegressionResult},
glm::{DispersionType, Glm},
link::Link,
num::Float,
response::Yval,
};
use num_traits::ToPrimitive;
#[cfg(feature = "stats")]
use statrs::distribution::Normal;
use std::marker::PhantomData;
/// Linear (ordinary least squares) regression model, parameterized by its
/// link function `L`. Defaults to the canonical identity link [`link::Id`].
pub struct Linear<L = link::Id>
where
    L: Link<Linear<L>>,
{
    // Zero-sized marker tying the link type to the model; never instantiated.
    _link: PhantomData<L>,
}
/// Any float-like scalar may serve as the response value for a linear model.
impl<Y, L> Yval<Linear<L>> for Y
where
    Y: Float + ToPrimitive + ToString,
    L: Link<Linear<L>>,
{
    /// Converts the response into the working float type `F`, returning
    /// `RegressionError::InvalidY` if the value is not representable.
    fn into_float<F: Float>(self) -> RegressionResult<F, F> {
        // The string representation is only built on the failure path.
        match F::from(self) {
            Some(converted) => Ok(converted),
            None => Err(RegressionError::InvalidY(self.to_string())),
        }
    }
}
#[cfg(feature = "stats")]
impl<L> Response for Linear<L>
where
    L: Link<Linear<L>>,
{
    type DistributionType = Normal;

    /// Builds the response distribution for a fitted mean `mu` and
    /// dispersion `phi`: a normal with standard deviation `sqrt(phi)`.
    fn get_distribution(mu: f64, phi: f64) -> Self::DistributionType {
        // Clamp the dispersion away from zero (and NaN) so the standard
        // deviation passed to `Normal::new` is strictly positive.
        let sigma = phi.max(f64::MIN_POSITIVE).sqrt();
        // `sigma` is positive by construction; only a non-finite `mu` can
        // violate the constructor's preconditions, which would be a bug
        // upstream in the fit.
        Normal::new(mu, sigma).expect("mean must be finite to build a normal distribution")
    }
}
/// GLM specification for linear regression: normal-family cumulant with
/// unit variance function and a free dispersion parameter.
impl<L> Glm for Linear<L>
where
    L: Link<Linear<L>>,
{
    type Link = L;

    // The normal family's dispersion (sigma^2) is estimated freely.
    const DISPERSED: DispersionType = DispersionType::FreeDispersion;

    /// Cumulant (log-partition) function b(theta) = theta^2 / 2.
    fn log_partition<F: Float>(nat_par: F) -> F {
        F::from(0.5).unwrap() * nat_par * nat_par
    }

    /// The variance function is identically one for the normal family.
    fn variance<F: Float>(_mean: F) -> F {
        F::one()
    }

    /// Saturated log-likelihood term; for the normal family this is just
    /// the log-partition evaluated at the observation itself.
    fn log_like_sat<F: Float>(y: F) -> F {
        Self::log_partition(y)
    }
}
pub(crate) mod link {
    //! Link functions for the linear regression model.
    use super::*;
    use crate::link::{Canonical, Link};

    /// The identity link function, canonical for linear regression.
    pub struct Id;

    /// Mark the identity link as the canonical one for this family.
    impl Canonical for Id {}

    impl Link<Linear> for Id {
        /// Identity map: the linear predictor equals the mean.
        #[inline]
        fn func<F: Float>(y: F) -> F {
            y
        }
        /// The inverse of the identity is the identity.
        #[inline]
        fn func_inv<F: Float>(lin_pred: F) -> F {
            lin_pred
        }
    }
}
#[cfg(test)]
mod tests {
    use super::Linear;
    use crate::{error::RegressionResult, model::ModelBuilder};
    use approx::assert_abs_diff_eq;
    use ndarray::array;

    /// The identity link and its inverse must round-trip over a wide range
    /// of magnitudes.
    #[test]
    fn id_closure() {
        use crate::link::TestLink;
        // Use the module-local `array!` import (consistent with `lin_reg`)
        // instead of a crate-level re-export path.
        let x = array![
            -1e5, -100., -13., -2.0, -1.0, -0.025, -0.001, 0., 0.001, 0.04, 1.0, 2.5, 17., 128.,
            1e5
        ];
        super::link::Id::check_closure(&x);
        super::link::Id::check_closure_y(&x);
    }

    /// A noiseless linear relationship should be recovered to near machine
    /// precision.
    #[test]
    fn lin_reg() -> RegressionResult<(), f64> {
        let beta = array![0.3, 1.2, -0.5];
        let data_x = array![[-0.1, 0.2], [0.7, 0.5], [3.2, 0.1]];
        // Responses generated exactly from `beta`, with no noise term.
        let data_y = array![
            beta[0] + beta[1] * data_x[[0, 0]] + beta[2] * data_x[[0, 1]],
            beta[0] + beta[1] * data_x[[1, 0]] + beta[2] * data_x[[1, 1]],
            beta[0] + beta[1] * data_x[[2, 0]] + beta[2] * data_x[[2, 1]],
        ];
        let model = ModelBuilder::<Linear>::data(&data_y, &data_x).build()?;
        let fit = model.fit_options().max_iter(10).fit()?;
        assert_abs_diff_eq!(beta, fit.result, epsilon = 64.0 * f64::EPSILON);
        // The likelihood-ratio statistic should at least be computable.
        let _lr: f64 = fit.lr_test();
        Ok(())
    }
}