use ndarray::prelude::*;
#[allow(unused_imports)]
use log::Level::*;
#[allow(unused_imports)]
use log::*;
use crate::types::*;
/// Multinomial (softmax) logistic-regression objective over `nbclass` classes.
///
/// Each stored observation contributes one term to the summed negative
/// log-likelihood; see the `Summation` / `SummationC1` impls below for the
/// per-term value and gradient.
pub struct LogisticRegression {
// Number of classes. Only nbclass - 1 coefficient rows are used by the
// impls below: the last class acts as the implicit reference class
// (its coefficient vector is fixed at zero).
nbclass: usize,
// Training data: (feature vector, class index) pairs. Class indices are
// expected in 0..nbclass — TODO(review): confirm callers guarantee this.
observations: Vec<(Array1<f64>, usize)>,
}
impl LogisticRegression {
pub fn new(nbclass: usize, observations: Vec<(Array1<f64>, usize)>) -> LogisticRegression {
LogisticRegression {
nbclass,
observations,
}
} }
impl Summation<Ix2> for LogisticRegression {
    /// Number of terms in the summation: one per observation.
    fn terms(&self) -> usize {
        self.observations.len()
    }

    /// Negative log-likelihood contribution of observation `term` under the
    /// multinomial logit model with coefficient matrix `coefficients`
    /// (one row per class, only the first `nbclass - 1` rows are read; the
    /// last class is the implicit reference class with zero coefficients).
    ///
    /// Computes `ln(1 + Σ_i exp(x·w_i)) − x·w_class`, where the second term
    /// is zero when the observation belongs to the reference class.
    fn term_value(&self, coefficients: &Array2<f64>, term: usize) -> f64 {
        let (ref x, term_class) = self.observations[term];
        // The dimension check is loop-invariant: assert once, not per class.
        assert_eq!(x.len(), coefficients.ncols());
        let mut dot_xi = Array1::<f64>::zeros(self.nbclass - 1);
        let mut log_arg = 1.0f64;
        for i in 0..self.nbclass - 1 {
            dot_xi[i] = x.dot(&coefficients.slice(s![i, ..]));
            // NOTE(review): exp() can overflow for large dot products; a
            // log-sum-exp formulation would be more robust, but is not applied
            // here to keep results numerically identical for existing callers.
            log_arg += dot_xi[i].exp();
        }
        // Reference class (index nbclass - 1) has no stored dot product.
        let other_term = if term_class < self.nbclass - 1 {
            dot_xi[term_class]
        } else {
            0.0
        };
        log_arg.ln() - other_term
    }
}
impl SummationC1<Ix2> for LogisticRegression {
    /// Writes the gradient of term `*term` with respect to the coefficient
    /// matrix `w` into `gradient` (rows 0..nbclass-1, one per non-reference
    /// class): `∂/∂w_kj = x_j·exp(x·w_k)/den − x_j·[class == k]`,
    /// with `den = 1 + Σ_k exp(x·w_k)`.
    fn term_gradient(&self, w: &Array2<f64>, term: &usize, gradient: &mut Array2<f64>) {
        let (ref x, obs_class) = self.observations[*term];
        let nb_active = self.nbclass - 1;
        // exp(x · w_k) for each non-reference class, plus the normalizer.
        let mut exp_dot = Array1::<f64>::zeros(nb_active);
        let mut den = 1.0f64;
        for class in 0..nb_active {
            exp_dot[class] = x.dot(&w.slice(s![class, ..])).exp();
            den += exp_dot[class];
        }
        for class in 0..nb_active {
            // Indicator of "this observation belongs to `class`".
            let indicator = if obs_class == class { 1.0 } else { 0.0 };
            for (feature, &x_j) in x.iter().enumerate() {
                // Same operation order as the value computation relies on:
                // (x_j * exp) / den, then subtract the indicator term.
                gradient[[class, feature]] = x_j * exp_dot[class] / den - indicator * x_j;
            }
        }
    }
}