//! Logistic Regression module.
//!
//! Contains an implementation of logistic regression trained via a
//! gradient-based optimization algorithm (gradient descent by default).
//! The model predicts binary class probabilities by applying the
//! sigmoid function to a linear combination of the inputs.

use linalg::{Matrix, BaseMatrix};
use linalg::Vector;
use learning::{LearningResult, SupModel};
use learning::toolkit::activ_fn::{ActivationFunc, Sigmoid};
use learning::toolkit::cost_fn::{CostFunc, CrossEntropyError};
use learning::optim::grad_desc::GradientDesc;
use learning::optim::{OptimAlgorithm, Optimizable};
use learning::error::Error;
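
/// Logistic Regression model.
///
/// Fits a linear model to binary targets by minimizing the
/// cross-entropy error with the optimization algorithm `A`.
///
/// A minimal usage sketch follows. The `rusty_machine` crate paths
/// are an assumption about where this module is mounted, so the
/// example is marked `ignore`:
///
/// ```ignore
/// use rusty_machine::learning::logistic_reg::LogisticRegressor;
/// use rusty_machine::learning::SupModel;
/// use rusty_machine::linalg::{Matrix, Vector};
///
/// let inputs = Matrix::new(4, 1, vec![1.0, 3.0, 5.0, 7.0]);
/// let targets = Vector::new(vec![0.0, 0.0, 1.0, 1.0]);
///
/// let mut model = LogisticRegressor::default();
///
/// // Train on the labelled data.
/// model.train(&inputs, &targets).unwrap();
///
/// // Predict P(y = 1) for a new point.
/// let new_point = Matrix::new(1, 1, vec![10.0]);
/// let output = model.predict(&new_point).unwrap();
/// assert!(output[0] > 0.5);
/// ```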
#[derive(Debug)]
pub struct LogisticRegressor<A>
    where A: OptimAlgorithm<BaseLogisticRegressor>
{
    base: BaseLogisticRegressor,
    alg: A,
}

/// Constructs a default Logistic Regression model
/// using standard gradient descent.
impl Default for LogisticRegressor<GradientDesc> {
    fn default() -> LogisticRegressor<GradientDesc> {
        LogisticRegressor {
            base: BaseLogisticRegressor::new(),
            alg: GradientDesc::default(),
        }
    }
}

impl<A: OptimAlgorithm<BaseLogisticRegressor>> LogisticRegressor<A> {
    /// Constructs an untrained logistic regression model
    /// with the given optimization algorithm.
    pub fn new(alg: A) -> LogisticRegressor<A> {
        LogisticRegressor {
            base: BaseLogisticRegressor::new(),
            alg,
        }
    }

    /// Gets a reference to the learned parameters,
    /// or `None` if the model has not been trained.
    pub fn parameters(&self) -> Option<&Vector<f64>> {
        self.base.parameters()
    }
}

impl<A> SupModel<Matrix<f64>, Vector<f64>> for LogisticRegressor<A>
    where A: OptimAlgorithm<BaseLogisticRegressor>
{
    /// Train the logistic regression model.
    ///
    /// Prepends a column of ones to the inputs for the intercept term,
    /// then minimizes the cross-entropy error over the weights with
    /// the chosen optimization algorithm.
    fn train(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) -> LearningResult<()> {
        // Add a bias column so the first weight acts as the intercept.
        let ones = Matrix::<f64>::ones(inputs.rows(), 1);
        let full_inputs = ones.hcat(inputs);

        let initial_params = vec![0.5; full_inputs.cols()];

        let optimal_w = self.alg.optimize(&self.base, &initial_params[..], &full_inputs, targets);
        self.base.set_parameters(Vector::new(optimal_w));
        Ok(())
    }

    /// Predict output probabilities from the model.
    ///
    /// Returns the sigmoid of the linear scores, i.e. the estimated
    /// probability that each input belongs to the positive class, or
    /// an error if the model has not been trained.
    fn predict(&self, inputs: &Matrix<f64>) -> LearningResult<Vector<f64>> {
        if let Some(v) = self.base.parameters() {
            // Prepend the same bias column used during training.
            let ones = Matrix::<f64>::ones(inputs.rows(), 1);
            let full_inputs = ones.hcat(inputs);
            Ok((full_inputs * v).apply(&Sigmoid::func))
        } else {
            Err(Error::new_untrained())
        }
    }
}

/// The base Logistic Regression model.
///
/// Stores the learned weights (intercept first) and implements
/// `Optimizable` so that an optimization algorithm can train it.
#[derive(Debug)]
pub struct BaseLogisticRegressor {
    parameters: Option<Vector<f64>>,
}

impl BaseLogisticRegressor {
    /// Constructs a new BaseLogisticRegressor
    /// with the parameters set to `None`.
    fn new() -> BaseLogisticRegressor {
        BaseLogisticRegressor { parameters: None }
    }

    /// Returns a reference to the parameters,
    /// or `None` if the model is untrained.
    fn parameters(&self) -> Option<&Vector<f64>> {
        self.parameters.as_ref()
    }

    /// Sets the parameters to the given vector.
    fn set_parameters(&mut self, params: Vector<f64>) {
        self.parameters = Some(params);
    }
}

/// Cost and gradient computation for the base model,
/// used by gradient-based optimization algorithms.
impl Optimizable for BaseLogisticRegressor {
    type Inputs = Matrix<f64>;
    type Targets = Vector<f64>;

    /// Computes the cross-entropy cost and its gradient
    /// at the given parameters.
    ///
    /// For weights `beta` the gradient of the mean cross-entropy
    /// error is `X^T (sigmoid(X beta) - y) / n`, where `n` is the
    /// number of input rows.
    fn compute_grad(&self,
                    params: &[f64],
                    inputs: &Matrix<f64>,
                    targets: &Vector<f64>)
                    -> (f64, Vec<f64>) {
        let beta_vec = Vector::new(params.to_vec());
        // Predicted probabilities: the sigmoid of the linear scores.
        let outputs = (inputs * beta_vec).apply(&Sigmoid::func);

        let cost = CrossEntropyError::cost(&outputs, targets);
        // Gradient of the mean cross-entropy error.
        let grad = (inputs.transpose() * (outputs - targets)) / (inputs.rows() as f64);

        (cost, grad.into_vec())
    }
}
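
// Smoke tests sketching the expected behaviour; they assume the
// default gradient descent settings converge on this tiny linearly
// separable problem.
#[cfg(test)]
mod tests {
    use super::LogisticRegressor;
    use learning::SupModel;
    use linalg::{Matrix, Vector};

    #[test]
    fn train_then_predict() {
        let inputs = Matrix::new(4, 1, vec![1.0, 3.0, 5.0, 7.0]);
        let targets = Vector::new(vec![0.0, 0.0, 1.0, 1.0]);

        let mut model = LogisticRegressor::default();
        model.train(&inputs, &targets).unwrap();

        // A point far on the positive side should score above 0.5.
        let output = model.predict(&Matrix::new(1, 1, vec![10.0])).unwrap();
        assert!(output[0] > 0.5);
    }

    // Predicting with an untrained model should return an error.
    #[test]
    fn untrained_predict_errors() {
        let model = LogisticRegressor::default();
        assert!(model.predict(&Matrix::new(1, 1, vec![1.0])).is_err());
    }
}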